diff --git a/.eslintignore b/.eslintignore index 972c818d791bf..93c69b4f9b207 100644 --- a/.eslintignore +++ b/.eslintignore @@ -39,7 +39,7 @@ target /x-pack/legacy/plugins/maps/public/vendor/** # package overrides -/packages/eslint-config-kibana +/packages/elastic-eslint-config-kibana /packages/kbn-interpreter/src/common/lib/grammar.js /packages/kbn-plugin-generator/template /packages/kbn-pm/dist diff --git a/.eslintrc.js b/.eslintrc.js index ff4ac180c3774..3161a25b70870 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -94,12 +94,6 @@ module.exports = { 'jsx-a11y/no-onchange': 'off', }, }, - { - files: ['src/plugins/es_ui_shared/**/*.{js,mjs,ts,tsx}'], - rules: { - 'react-hooks/exhaustive-deps': 'off', - }, - }, { files: ['src/plugins/kibana_react/**/*.{js,mjs,ts,tsx}'], rules: { @@ -125,25 +119,12 @@ module.exports = { 'jsx-a11y/click-events-have-key-events': 'off', }, }, - { - files: ['x-pack/legacy/plugins/index_management/**/*.{js,mjs,ts,tsx}'], - rules: { - 'react-hooks/exhaustive-deps': 'off', - 'react-hooks/rules-of-hooks': 'off', - }, - }, { files: ['x-pack/plugins/ml/**/*.{js,mjs,ts,tsx}'], rules: { 'react-hooks/exhaustive-deps': 'off', }, }, - { - files: ['x-pack/legacy/plugins/snapshot_restore/**/*.{js,mjs,ts,tsx}'], - rules: { - 'react-hooks/exhaustive-deps': 'off', - }, - }, /** * Files that require Apache 2.0 headers, settings @@ -324,6 +305,8 @@ module.exports = { '!src/core/server/mocks{,.ts}', '!src/core/server/types{,.ts}', '!src/core/server/test_utils{,.ts}', + '!src/core/server/utils', // ts alias + '!src/core/server/utils/**/*', // for absolute imports until fixed in // https://github.com/elastic/kibana/issues/36096 '!src/core/server/*.test.mocks{,.ts}', diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 66fb31cc91d5a..bcb4774475849 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -63,6 +63,13 @@ /src/plugins/apm_oss/ @elastic/apm-ui /src/apm.js @watson @vigneshshanmugam +# Client Side Monitoring (lives in APM directories but owned by Uptime) +/x-pack/plugins/apm/e2e/cypress/support/step_definitions/rum @elastic/uptime +/x-pack/plugins/apm/public/application/csmApp.tsx @elastic/uptime +/x-pack/plugins/apm/public/components/app/RumDashboard @elastic/uptime +/x-pack/plugins/apm/server/lib/rum_client @elastic/uptime +/x-pack/plugins/apm/server/routes/rum_client.ts @elastic/uptime + # Beats /x-pack/legacy/plugins/beats_management/ @elastic/beats @@ -85,7 +92,6 @@ /x-pack/plugins/ingest_manager/ @elastic/ingest-management /x-pack/legacy/plugins/ingest_manager/ @elastic/ingest-management /x-pack/plugins/observability/ @elastic/observability-ui -/x-pack/legacy/plugins/monitoring/ @elastic/stack-monitoring-ui /x-pack/plugins/monitoring/ @elastic/stack-monitoring-ui /x-pack/plugins/uptime @elastic/uptime @@ -210,8 +216,19 @@ x-pack/plugins/telemetry_collection_xpack/schema/xpack_plugins.json @elastic/kib /x-pack/test/functional_with_es_ssl/fixtures/plugins/alerts/ @elastic/kibana-alerting-services # Enterprise Search -/x-pack/plugins/enterprise_search/ @elastic/app-search-frontend @elastic/workplace-search-frontend -/x-pack/test/functional_enterprise_search/ @elastic/app-search-frontend @elastic/workplace-search-frontend +# Shared +/x-pack/plugins/enterprise_search/ @elastic/enterprise-search-frontend +/x-pack/test/functional_enterprise_search/ @elastic/enterprise-search-frontend +# App Search +/x-pack/plugins/enterprise_search/public/applications/app_search @elastic/app-search-frontend +/x-pack/plugins/enterprise_search/server/routes/app_search 
@elastic/app-search-frontend +/x-pack/plugins/enterprise_search/server/collectors/app_search @elastic/app-search-frontend +/x-pack/plugins/enterprise_search/server/saved_objects/app_search @elastic/app-search-frontend +# Workplace Search +/x-pack/plugins/enterprise_search/public/applications/workplace_search @elastic/workplace-search-frontend +/x-pack/plugins/enterprise_search/server/routes/workplace_search @elastic/workplace-search-frontend +/x-pack/plugins/enterprise_search/server/collectors/workplace_search @elastic/workplace-search-frontend +/x-pack/plugins/enterprise_search/server/saved_objects/workplace_search @elastic/workplace-search-frontend # Elasticsearch UI /src/plugins/dev_tools/ @elastic/es-ui diff --git a/.github/ISSUE_TEMPLATE/APM.md b/.github/ISSUE_TEMPLATE/APM.md new file mode 100644 index 0000000000000..983806f70bc3f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/APM.md @@ -0,0 +1,11 @@ +--- +name: APM Issue +about: Issues related to the APM solution in Kibana +labels: Team:apm +title: [APM] +--- + +**Versions** +Kibana: (if relevant) +APM Server: (if relevant) +Elasticsearch: (if relevant) diff --git a/.gitignore b/.gitignore index 1d12ef2a9cff3..1bbd38debbf0f 100644 --- a/.gitignore +++ b/.gitignore @@ -60,3 +60,6 @@ apm.tsconfig.json # release notes script output report.csv report.asciidoc + +# TS incremental build cache +*.tsbuildinfo diff --git a/NOTICE.txt b/NOTICE.txt index e1552852d0349..d689abf4c4e05 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -281,6 +281,13 @@ WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +--- +This product includes code in the function applyCubicBezierStyles that was +inspired by a public Codepen, which was available under a "MIT" license. + +Copyright (c) 2020 by Guillaume (https://codepen.io/guillaumethomas/pen/xxbbBKO) +MIT License http://www.opensource.org/licenses/mit-license + --- This product includes code that is adapted from mapbox-gl-js, which is available under a "BSD-3-Clause" license. diff --git a/docs/apm/apm-app-users.asciidoc b/docs/apm/apm-app-users.asciidoc index d766c866f87e4..3f0a42251304c 100644 --- a/docs/apm/apm-app-users.asciidoc +++ b/docs/apm/apm-app-users.asciidoc @@ -84,7 +84,7 @@ Here are two examples: | Allow the use of the the {beat_kib_app} | Spaces -| `Read` or `All` on Dashboards, Visualize, and Discover +| `Read` or `All` on Dashboards and Discover | Allow the user to view, edit, and create dashboards, as well as browse data. |==== diff --git a/docs/apm/troubleshooting.asciidoc b/docs/apm/troubleshooting.asciidoc index 65f7a378ec244..e00a67f6c78a4 100644 --- a/docs/apm/troubleshooting.asciidoc +++ b/docs/apm/troubleshooting.asciidoc @@ -49,7 +49,7 @@ GET /_template/apm-{version} *Using Logstash, Kafka, etc.* If you're not outputting data directly from APM Server to Elasticsearch (perhaps you're using Logstash or Kafka), then the index template will not be set up automatically. Instead, you'll need to -{apm-server-ref}/_manually_loading_template_configuration.html[load the template manually]. +{apm-server-ref}/configuration-template.html[load the template manually]. *Using a custom index names* This problem can also occur if you've customized the index name that you write APM data to. 
diff --git a/docs/canvas/canvas-elements.asciidoc b/docs/canvas/canvas-elements.asciidoc new file mode 100644 index 0000000000000..782bae061b8c1 --- /dev/null +++ b/docs/canvas/canvas-elements.asciidoc @@ -0,0 +1,167 @@ +[role="xpack"] +[[add-canvas-elements]] +=== Add elements + +Create a story about your data by adding elements to your workpad that include images, text, charts, and more. You can create your own elements and connect them to your data sources, add saved objects, and add your own images. + +[float] +[[create-canvas-element]] +==== Create an element + +Choose the type of element you want to use, then connect it to your own data. + +. Click *Add element*, then select the element you want to use. ++ +[role="screenshot"] +image::images/canvas-element-select.gif[Canvas elements] + +. To familiarize yourself with the element, use the preconfigured data demo data. ++ +By default, most of the elements you create use demo data until you change the data source. The demo data includes a small data set that you can use to experiment with your element. + +. To connect the element to your data, select *Data*, then select one of the following data sources: + +* *{es} SQL* — Access your data in {es} using SQL syntax. For information about SQL syntax, refer to {ref}/sql-spec.html[SQL language]. + +* *{es} documents* — Access your data in {es} without using aggregations. To use, select an index and fields, and optionally enter a query using the <>. Use the *{es} documents* data source when you have low volume datasets, to view raw documents, or to plot exact, non-aggregated values on a chart. + +* *Timelion* — Access your time series data using <> queries. To use Timelion queries, you can enter a query using the <>. + +Each element can display a different data source. Pages and workpads often contain multiple data sources. + +[float] +[[canvas-add-object]] +==== Add a saved object + +Add <> to your workpad, such as maps and visualizations. + +. Click *Add element > Add from Visualize Library*. + +. Select the saved object you want to add. ++ +[role="screenshot"] +image::images/canvas-map-embed.gif[] + +. To use the customization options, click the panel menu, then select one of the following options: + +* *Edit map* — Opens <> or <> so that you can edit the original saved object. + +* *Edit panel title* — Adds a title to the saved object. + +* *Customize time range* — Exposes a time filter dedicated to the saved object. + +* *Inspect* — Allows you to drill down into the element data. + +[float] +[[canvas-add-image]] +==== Add your own image + +To personalize your workpad, add your own logos and graphics. + +. Click *Add element > Manage assets*. + +. On the *Manage workpad assets* window, drag and drop your images. + +. To add the image to the workpad, click the *Create image element* icon. ++ +[role="screenshot"] +image::images/canvas-add-image.gif[] + +[float] +[[move-canvas-elements]] +==== Organize elements + +Move and resize your elements to meet your design needs. + +* To move, click and hold the element, then drag to the new location. + +* To move by 1 pixel, select the element, press and hold Shift, then use your arrow keys. + +* To move by 10 pixels, select the element, then use your arrow keys. + +* To resize, click and drag the resize handles to the new dimensions. + +[float] +[[format-canvas-elements]] +==== Format elements + +For consistency and readability across your workpad pages, align, distribute, and reorder elements. + +To align two or more elements: + +. 
Press and hold Shift, then select the elements you want to align. + +. Click *Edit > Alignment*, then select the alignment option. + +To distribute three or more elements: + +. Press and hold Shift, then select the elements you want to distribute. + +. Click *Edit > Distribution*, then select the distribution option. + +To reorder elements: + +. Select the element you want to reorder. + +. Click *Edit > Order*, then select the order option. + +[float] +[[data-display]] +==== Change the element display options + +Each element has its own display options to fit your design needs. + +To choose the display options, click *Display*, then make your changes. + +To define the appearance of the container and border: + +. Next to *Element style*, click *+*, then select *Container style*. + +. Expand *Container style*. + +. Change the *Appearance* and *Border* options. + +To apply CSS overrides: + +. Next to *Element style*, click *+*, then select *CSS*. + +. Enter the *CSS*. ++ +For example, to center the Markdown element, enter: ++ +[source,text] +-------------------------------------------------- +.canvasRenderEl h1 { +text.align: center; +} +-------------------------------------------------- + +. Click *Apply stylesheet*. + +[float] +[[save-elements]] +==== Save elements + +To use the elements across all workpads, save the elements. + +When you're ready to save your element, select the element, then click *Edit > Save as new element*. + +[role="screenshot"] +image::images/canvas_save_element.png[] + +To save a group of elements, press and hold Shift, select the elements you want to save, then click *Edit > Save as new element*. + +To access your saved elements, click *Add element > My elements*. + +[float] +[[delete-elements]] +==== Delete elements + +When you no longer need an element, delete it from your workpad. + +. Select the element you want to delete. + +. Click *Edit > Delete*. ++ +[role="screenshot"] +image::images/canvas_element_options.png[] diff --git a/docs/canvas/canvas-tinymath-functions.asciidoc b/docs/canvas/canvas-tinymath-functions.asciidoc index 73808fc6625d1..f92f7c642a2ee 100644 --- a/docs/canvas/canvas-tinymath-functions.asciidoc +++ b/docs/canvas/canvas-tinymath-functions.asciidoc @@ -492,37 +492,6 @@ find the mean by index. |one or more numbers or arrays of numbers |=== -*Returns*: `number` | `Array.`. The maximum value of all numbers if -`args` contains only numbers. Returns an array with the the maximum values at each -index, including all scalar numbers in `args` in the calculation at each index if -`args` contains at least one array. - -*Throws*: `'Array length mismatch'` if `args` contains arrays of different lengths - -*Example* -[source, js] ------------- -max(1, 2, 3) // returns 3 -max([10, 20, 30, 40], 15) // returns [15, 20, 30, 40] -max([1, 9], 4, [3, 5]) // returns [max([1, 4, 3]), max([9, 4, 5])] = [4, 9] ------------- - -[float] -=== mean( ...args ) - -Finds the mean value of one of more numbers/arrays of numbers passed into the function. -If at least one array of numbers is passed into the function, the function will -find the mean by index. - -[cols="3*^<"] -|=== -|Param |Type |Description - -|...args -|number \| Array. -|one or more numbers or arrays of numbers -|=== - *Returns*: `number` | `Array.`. The mean value of all numbers if `args` contains only numbers. 
Returns an array with the the mean values of each index, including all scalar numbers in `args` in the calculation at each index if `args` diff --git a/docs/developer/contributing/development-accessibility-tests.asciidoc b/docs/developer/contributing/development-accessibility-tests.asciidoc index facf7ff14a6c1..584d779bc7de6 100644 --- a/docs/developer/contributing/development-accessibility-tests.asciidoc +++ b/docs/developer/contributing/development-accessibility-tests.asciidoc @@ -1,23 +1,104 @@ [[development-accessibility-tests]] == Automated Accessibility Testing + +To write an accessibility test, use the provided accessibility service `getService('a11y')`. Accessibility tests are fairly straightforward to write as https://github.com/dequelabs/axe-core[axe] does most of the heavy lifting. Navigate to the UI that you need to test, then call `testAppSnapshot();` from the service imported earlier to make sure axe finds no failures. Navigate through every portion of the UI for the best coverage. + +An example test might look like this: +[source,js] +---- +export default function ({ getService, getPageObjects }) { + const { common, home } = getPageObjects(['common', 'home']); + const a11y = getService('a11y'); /* this is the wrapping service around axe */ + + describe('Kibana Home', () => { + before(async () => { + await common.navigateToApp('home'); /* navigates to the page we want to test */ + }); + + it('Kibana Home view', async () => { + await retry.waitFor( + 'home page visible', + async () => await testSubjects.exists('homeApp') + ); /* confirm you're on the correct page and that it's loaded */ + await a11y.testAppSnapshot(); /* this expects that there are no failures found by axe */ + }); + + /** + * If these tests were added by our QA team, tests that fail that require significant app code + * changes to be fixed will be skipped with a corresponding issue label with more info + */ + // Skipped due to https://github.com/elastic/kibana/issues/99999 + it.skip('all plugins view page meets a11y requirements', async () => { + await home.clickAllKibanaPlugins(); + await a11y.testAppSnapshot(); + }); + + /** + * Testing all the versions and different views of of a page is important to get good + * coverage. Things like empty states, different license levels, different permissions, and + * loaded data can all significantly change the UI which necessitates their own test. + */ + it('Add Kibana sample data page', async () => { + await common.navigateToUrl('home', '/tutorial_directory/sampleData', { + useActualUrl: true, + }); + await a11y.testAppSnapshot(); + }); + }); +} +---- + +=== Running tests To run the tests locally: [arabic] -. In one terminal window run -`node scripts/functional_tests_server --config test/accessibility/config.ts` -. In another terminal window run -`node scripts/functional_test_runner.js --config test/accessibility/config.ts` +. In one terminal window run: ++ +[source,shell] +----------- +node scripts/functional_tests_server --config test/accessibility/config.ts +----------- + +. When the server prints that it is ready, in another terminal window run: ++ +[source,shell] +----------- +node scripts/functional_test_runner.js --config test/accessibility/config.ts +----------- To run the x-pack tests, swap the config file out for `x-pack/test/accessibility/config.ts`. -After the server is up, you can go to this instance of {kib} at -`localhost:5620`. - The testing is done using https://github.com/dequelabs/axe-core[axe]. 
-The same thing that runs in CI, can be run locally using their browser -plugins: +You can run the same thing that runs CI using browser plugins: * https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US[Chrome] -* https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/[Firefox] \ No newline at end of file +* https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/[Firefox] + +=== Anatomy of a failure + +Failures can seem confusing if you've never seen one before. Here is a breakdown of what a failure coming from CI might look like: +[source,bash] +---- +1) Dashboard + create dashboard button: + + Error: a11y report: + + VIOLATION + [aria-hidden-focus]: Ensures aria-hidden elements do not contain focusable elements + Help: https://dequeuniversity.com/rules/axe/3.5/aria-hidden-focus?application=axeAPI + Elements: + - #example + at Accessibility.testAxeReport (test/accessibility/services/a11y/a11y.ts:90:15) + at Accessibility.testAppSnapshot (test/accessibility/services/a11y/a11y.ts:58:18) + at process._tickCallback (internal/process/next_tick.js:68:7) +---- + + +* "Dashboard" and "create dashboard button" are the names of the test suite and specific test that failed. +* Always in brackets, "[aria-hidden-focus]" is the name of the axe rule that failed, followed by a short description. +* "Help: " links to the axe documentation for that rule, including severity, remediation tips, and good and bad code examples. +* "Elements:" points to where in the DOM the failure originated (using CSS selector syntax). In this example, the problem came from an element with the ID `example`. If the selector is too complicated to find the source of the problem, use the browser plugins mentioned earlier to locate it. If you have a general idea where the issue is coming from, you can also try adding unique IDs to the page to narrow down the location. +* The stack trace points to the internals of axe. The stack trace is there in case the test failure is a bug in axe and not in your code, although this is unlikely. diff --git a/docs/developer/contributing/development-github.asciidoc b/docs/developer/contributing/development-github.asciidoc index a6d4e29940487..84f51843098a7 100644 --- a/docs/developer/contributing/development-github.asciidoc +++ b/docs/developer/contributing/development-github.asciidoc @@ -1,5 +1,5 @@ [[development-github]] -== How we use git and github +== How we use Git and GitHub [discrete] === Forking @@ -12,17 +12,21 @@ repo, which we'll refer to in later code snippets. [discrete] === Branching -* All work on the next major release goes into master. -* Past major release branches are named `{majorVersion}.x`. They contain -work that will go into the next minor release. For example, if the next -minor release is `5.2.0`, work for it should go into the `5.x` branch. -* Past minor release branches are named `{majorVersion}.{minorVersion}`. -They contain work that will go into the next patch release. For example, -if the next patch release is `5.3.1`, work for it should go into the -`5.3` branch. -* All work is done on feature branches and merged into one of these -branches. -* Where appropriate, we'll backport changes into older release branches. +At Elastic, all products in the stack, including Kibana, are released at the same time with the same version number. Most of these projects have the following branching strategy: + +* `master` is the next major version. +* `.x` is the next minor version. 
+* `.` is the next release of a minor version, including patch releases. + +As an example, let's assume that the `7.x` branch is currently a not-yet-released `7.6.0`. Once `7.6.0` has reached feature freeze, it will be branched to `7.6` and `7.x` will be updated to reflect `7.7.0`. The release of `7.6.0` and subsequent patch releases will be cut from the `7.6` branch. At any time, you can verify the current version of a branch by inspecting the `version` attribute in the `package.json` file within the Kibana source. + +Pull requests are made into the `master` branch and then backported when it is safe and appropriate. + +* Breaking changes do not get backported and only go into `master`. +* All non-breaking changes can be backported to the `.x` branch. +* Features should not be backported to a `.` branch. +* Bugs can be backported to a `.` branch if the changes are safe and appropriate. Safety is a judgment call you make based on factors like the bug's severity, test coverage, confidence in the changes, etc. Your reasoning should be included in the pull request description. +* Documentation changes can be backported to any branch at any time. [discrete] === Commits and Merging @@ -109,4 +113,4 @@ Assuming you've successfully rebased and you're happy with the code, you should [discrete] === Creating a pull request -See <> for the next steps on getting your code changes merged into {kib}. \ No newline at end of file +See <> for the next steps on getting your code changes merged into {kib}. diff --git a/docs/developer/getting-started/running-kibana-advanced.asciidoc b/docs/developer/getting-started/running-kibana-advanced.asciidoc index 44897184f88f2..277e52a3dc8e9 100644 --- a/docs/developer/getting-started/running-kibana-advanced.asciidoc +++ b/docs/developer/getting-started/running-kibana-advanced.asciidoc @@ -48,7 +48,7 @@ If you’re installing dependencies and seeing an error that looks something like .... -Unsupported URL Type: link:packages/eslint-config-kibana +Unsupported URL Type: link:packages/elastic-eslint-config-kibana .... you’re likely running `npm`. To install dependencies in {kib} you diff --git a/docs/development/core/public/kibana-plugin-core-public.app.capabilities.md b/docs/development/core/public/kibana-plugin-core-public.app.capabilities.md new file mode 100644 index 0000000000000..4a027a6ab132c --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.capabilities.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [capabilities](./kibana-plugin-core-public.app.capabilities.md) + +## App.capabilities property + +Custom capabilities defined by the app. 
+ +Signature: + +```typescript +capabilities?: Partial; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.category.md b/docs/development/core/public/kibana-plugin-core-public.app.category.md new file mode 100644 index 0000000000000..a1e74f2bcf5e2 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.category.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [category](./kibana-plugin-core-public.app.category.md) + +## App.category property + +The category definition of the product See [AppCategory](./kibana-plugin-core-public.appcategory.md) See DEFAULT\_APP\_CATEGORIES for more reference + +Signature: + +```typescript +category?: AppCategory; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.defaultpath.md b/docs/development/core/public/kibana-plugin-core-public.app.defaultpath.md new file mode 100644 index 0000000000000..3c952ec053e62 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.defaultpath.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [defaultPath](./kibana-plugin-core-public.app.defaultpath.md) + +## App.defaultPath property + +Allow to define the default path a user should be directed to when navigating to the app. When defined, this value will be used as a default for the `path` option when calling [navigateToApp](./kibana-plugin-core-public.applicationstart.navigatetoapp.md)\`, and will also be appended to the [application navLink](./kibana-plugin-core-public.chromenavlink.md) in the navigation bar. + +Signature: + +```typescript +defaultPath?: string; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.euiicontype.md b/docs/development/core/public/kibana-plugin-core-public.app.euiicontype.md new file mode 100644 index 0000000000000..ff79d832f92e2 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.euiicontype.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [euiIconType](./kibana-plugin-core-public.app.euiicontype.md) + +## App.euiIconType property + +A EUI iconType that will be used for the app's icon. This icon takes precendence over the `icon` property. + +Signature: + +```typescript +euiIconType?: string; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.icon.md b/docs/development/core/public/kibana-plugin-core-public.app.icon.md new file mode 100644 index 0000000000000..98260da5d2021 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.icon.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [icon](./kibana-plugin-core-public.app.icon.md) + +## App.icon property + +A URL to an image file used as an icon. Used as a fallback if `euiIconType` is not provided. 
+ +Signature: + +```typescript +icon?: string; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.id.md b/docs/development/core/public/kibana-plugin-core-public.app.id.md new file mode 100644 index 0000000000000..9899cfc0cf572 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.id.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [id](./kibana-plugin-core-public.app.id.md) + +## App.id property + +The unique identifier of the application + +Signature: + +```typescript +id: string; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.md b/docs/development/core/public/kibana-plugin-core-public.app.md index 8dd60972549f9..7bdee9dc4c53e 100644 --- a/docs/development/core/public/kibana-plugin-core-public.app.md +++ b/docs/development/core/public/kibana-plugin-core-public.app.md @@ -4,12 +4,11 @@ ## App interface -Extension of [common app properties](./kibana-plugin-core-public.appbase.md) with the mount function. Signature: ```typescript -export interface App extends AppBase +export interface App ``` ## Properties @@ -17,7 +16,19 @@ export interface App extends AppBase | Property | Type | Description | | --- | --- | --- | | [appRoute](./kibana-plugin-core-public.app.approute.md) | string | Override the application's routing path from /app/${id}. Must be unique across registered applications. Should not include the base path from HTTP. | +| [capabilities](./kibana-plugin-core-public.app.capabilities.md) | Partial<Capabilities> | Custom capabilities defined by the app. | +| [category](./kibana-plugin-core-public.app.category.md) | AppCategory | The category definition of the product See [AppCategory](./kibana-plugin-core-public.appcategory.md) See DEFAULT\_APP\_CATEGORIES for more reference | | [chromeless](./kibana-plugin-core-public.app.chromeless.md) | boolean | Hide the UI chrome when the application is mounted. Defaults to false. Takes precedence over chrome service visibility settings. | +| [defaultPath](./kibana-plugin-core-public.app.defaultpath.md) | string | Allow to define the default path a user should be directed to when navigating to the app. When defined, this value will be used as a default for the path option when calling [navigateToApp](./kibana-plugin-core-public.applicationstart.navigatetoapp.md)\`, and will also be appended to the [application navLink](./kibana-plugin-core-public.chromenavlink.md) in the navigation bar. | +| [euiIconType](./kibana-plugin-core-public.app.euiicontype.md) | string | A EUI iconType that will be used for the app's icon. This icon takes precendence over the icon property. | | [exactRoute](./kibana-plugin-core-public.app.exactroute.md) | boolean | If set to true, the application's route will only be checked against an exact match. Defaults to false. | +| [icon](./kibana-plugin-core-public.app.icon.md) | string | A URL to an image file used as an icon. Used as a fallback if euiIconType is not provided. | +| [id](./kibana-plugin-core-public.app.id.md) | string | The unique identifier of the application | | [mount](./kibana-plugin-core-public.app.mount.md) | AppMount<HistoryLocationState> | AppMountDeprecated<HistoryLocationState> | A mount function called when the user navigates to this app's route. May have signature of [AppMount](./kibana-plugin-core-public.appmount.md) or [AppMountDeprecated](./kibana-plugin-core-public.appmountdeprecated.md). 
| +| [navLinkStatus](./kibana-plugin-core-public.app.navlinkstatus.md) | AppNavLinkStatus | The initial status of the application's navLink. Defaulting to visible if status is accessible and hidden if status is inaccessible See [AppNavLinkStatus](./kibana-plugin-core-public.appnavlinkstatus.md) | +| [order](./kibana-plugin-core-public.app.order.md) | number | An ordinal used to sort nav links relative to one another for display. | +| [status](./kibana-plugin-core-public.app.status.md) | AppStatus | The initial status of the application. Defaulting to accessible | +| [title](./kibana-plugin-core-public.app.title.md) | string | The title of the application. | +| [tooltip](./kibana-plugin-core-public.app.tooltip.md) | string | A tooltip shown when hovering over app link. | +| [updater$](./kibana-plugin-core-public.app.updater_.md) | Observable<AppUpdater> | An [AppUpdater](./kibana-plugin-core-public.appupdater.md) observable that can be used to update the application [AppUpdatableFields](./kibana-plugin-core-public.appupdatablefields.md) at runtime. | diff --git a/docs/development/core/public/kibana-plugin-core-public.app.navlinkstatus.md b/docs/development/core/public/kibana-plugin-core-public.app.navlinkstatus.md new file mode 100644 index 0000000000000..c01a26e42e237 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.navlinkstatus.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [navLinkStatus](./kibana-plugin-core-public.app.navlinkstatus.md) + +## App.navLinkStatus property + +The initial status of the application's navLink. Defaulting to `visible` if `status` is `accessible` and `hidden` if status is `inaccessible` See [AppNavLinkStatus](./kibana-plugin-core-public.appnavlinkstatus.md) + +Signature: + +```typescript +navLinkStatus?: AppNavLinkStatus; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.order.md b/docs/development/core/public/kibana-plugin-core-public.app.order.md new file mode 100644 index 0000000000000..bb6be116b6b58 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.order.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [order](./kibana-plugin-core-public.app.order.md) + +## App.order property + +An ordinal used to sort nav links relative to one another for display. + +Signature: + +```typescript +order?: number; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.status.md b/docs/development/core/public/kibana-plugin-core-public.app.status.md new file mode 100644 index 0000000000000..caa6ff1dcac9e --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.status.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [status](./kibana-plugin-core-public.app.status.md) + +## App.status property + +The initial status of the application. 
Defaulting to `accessible` + +Signature: + +```typescript +status?: AppStatus; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.title.md b/docs/development/core/public/kibana-plugin-core-public.app.title.md new file mode 100644 index 0000000000000..c705e3ab8d2b1 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.title.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [title](./kibana-plugin-core-public.app.title.md) + +## App.title property + +The title of the application. + +Signature: + +```typescript +title: string; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.tooltip.md b/docs/development/core/public/kibana-plugin-core-public.app.tooltip.md new file mode 100644 index 0000000000000..e901de0fdccc9 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.tooltip.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [tooltip](./kibana-plugin-core-public.app.tooltip.md) + +## App.tooltip property + +A tooltip shown when hovering over app link. + +Signature: + +```typescript +tooltip?: string; +``` diff --git a/docs/development/core/public/kibana-plugin-core-public.app.updater_.md b/docs/development/core/public/kibana-plugin-core-public.app.updater_.md new file mode 100644 index 0000000000000..67acccbd02965 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.updater_.md @@ -0,0 +1,44 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [updater$](./kibana-plugin-core-public.app.updater_.md) + +## App.updater$ property + +An [AppUpdater](./kibana-plugin-core-public.appupdater.md) observable that can be used to update the application [AppUpdatableFields](./kibana-plugin-core-public.appupdatablefields.md) at runtime. + +Signature: + +```typescript +updater$?: Observable; +``` + +## Example + +How to update an application navLink at runtime + +```ts +// inside your plugin's setup function +export class MyPlugin implements Plugin { + private appUpdater = new BehaviorSubject(() => ({})); + + setup({ application }) { + application.register({ + id: 'my-app', + title: 'My App', + updater$: this.appUpdater, + async mount(params) { + const { renderApp } = await import('./application'); + return renderApp(params); + }, + }); + } + + start() { + // later, when the navlink needs to be updated + appUpdater.next(() => { + navLinkStatus: AppNavLinkStatus.disabled, + }) + } + +``` + diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.capabilities.md b/docs/development/core/public/kibana-plugin-core-public.appbase.capabilities.md deleted file mode 100644 index 3dd440c4253b3..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.capabilities.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [capabilities](./kibana-plugin-core-public.appbase.capabilities.md) - -## AppBase.capabilities property - -Custom capabilities defined by the app. 
- -Signature: - -```typescript -capabilities?: Partial; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.category.md b/docs/development/core/public/kibana-plugin-core-public.appbase.category.md deleted file mode 100644 index 29532a15747e1..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.category.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [category](./kibana-plugin-core-public.appbase.category.md) - -## AppBase.category property - -The category definition of the product See [AppCategory](./kibana-plugin-core-public.appcategory.md) See DEFAULT\_APP\_CATEGORIES for more reference - -Signature: - -```typescript -category?: AppCategory; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.chromeless.md b/docs/development/core/public/kibana-plugin-core-public.appbase.chromeless.md deleted file mode 100644 index 793eab4b5bdfa..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.chromeless.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [chromeless](./kibana-plugin-core-public.appbase.chromeless.md) - -## AppBase.chromeless property - -Hide the UI chrome when the application is mounted. Defaults to `false`. Takes precedence over chrome service visibility settings. - -Signature: - -```typescript -chromeless?: boolean; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.defaultpath.md b/docs/development/core/public/kibana-plugin-core-public.appbase.defaultpath.md deleted file mode 100644 index 51492756ef232..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.defaultpath.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [defaultPath](./kibana-plugin-core-public.appbase.defaultpath.md) - -## AppBase.defaultPath property - -Allow to define the default path a user should be directed to when navigating to the app. When defined, this value will be used as a default for the `path` option when calling [navigateToApp](./kibana-plugin-core-public.applicationstart.navigatetoapp.md)\`, and will also be appended to the [application navLink](./kibana-plugin-core-public.chromenavlink.md) in the navigation bar. - -Signature: - -```typescript -defaultPath?: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.euiicontype.md b/docs/development/core/public/kibana-plugin-core-public.appbase.euiicontype.md deleted file mode 100644 index e5bfa38097361..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.euiicontype.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [euiIconType](./kibana-plugin-core-public.appbase.euiicontype.md) - -## AppBase.euiIconType property - -A EUI iconType that will be used for the app's icon. This icon takes precendence over the `icon` property. 
- -Signature: - -```typescript -euiIconType?: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.icon.md b/docs/development/core/public/kibana-plugin-core-public.appbase.icon.md deleted file mode 100644 index 0bd67922dc39c..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.icon.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [icon](./kibana-plugin-core-public.appbase.icon.md) - -## AppBase.icon property - -A URL to an image file used as an icon. Used as a fallback if `euiIconType` is not provided. - -Signature: - -```typescript -icon?: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.id.md b/docs/development/core/public/kibana-plugin-core-public.appbase.id.md deleted file mode 100644 index 6c0ec462fa16b..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.id.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [id](./kibana-plugin-core-public.appbase.id.md) - -## AppBase.id property - -The unique identifier of the application - -Signature: - -```typescript -id: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.md b/docs/development/core/public/kibana-plugin-core-public.appbase.md deleted file mode 100644 index 7b624f12ac1df..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.md +++ /dev/null @@ -1,31 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) - -## AppBase interface - - -Signature: - -```typescript -export interface AppBase -``` - -## Properties - -| Property | Type | Description | -| --- | --- | --- | -| [capabilities](./kibana-plugin-core-public.appbase.capabilities.md) | Partial<Capabilities> | Custom capabilities defined by the app. | -| [category](./kibana-plugin-core-public.appbase.category.md) | AppCategory | The category definition of the product See [AppCategory](./kibana-plugin-core-public.appcategory.md) See DEFAULT\_APP\_CATEGORIES for more reference | -| [chromeless](./kibana-plugin-core-public.appbase.chromeless.md) | boolean | Hide the UI chrome when the application is mounted. Defaults to false. Takes precedence over chrome service visibility settings. | -| [defaultPath](./kibana-plugin-core-public.appbase.defaultpath.md) | string | Allow to define the default path a user should be directed to when navigating to the app. When defined, this value will be used as a default for the path option when calling [navigateToApp](./kibana-plugin-core-public.applicationstart.navigatetoapp.md)\`, and will also be appended to the [application navLink](./kibana-plugin-core-public.chromenavlink.md) in the navigation bar. | -| [euiIconType](./kibana-plugin-core-public.appbase.euiicontype.md) | string | A EUI iconType that will be used for the app's icon. This icon takes precendence over the icon property. | -| [icon](./kibana-plugin-core-public.appbase.icon.md) | string | A URL to an image file used as an icon. Used as a fallback if euiIconType is not provided. 
| -| [id](./kibana-plugin-core-public.appbase.id.md) | string | The unique identifier of the application | -| [navLinkStatus](./kibana-plugin-core-public.appbase.navlinkstatus.md) | AppNavLinkStatus | The initial status of the application's navLink. Defaulting to visible if status is accessible and hidden if status is inaccessible See [AppNavLinkStatus](./kibana-plugin-core-public.appnavlinkstatus.md) | -| [order](./kibana-plugin-core-public.appbase.order.md) | number | An ordinal used to sort nav links relative to one another for display. | -| [status](./kibana-plugin-core-public.appbase.status.md) | AppStatus | The initial status of the application. Defaulting to accessible | -| [title](./kibana-plugin-core-public.appbase.title.md) | string | The title of the application. | -| [tooltip](./kibana-plugin-core-public.appbase.tooltip.md) | string | A tooltip shown when hovering over app link. | -| [updater$](./kibana-plugin-core-public.appbase.updater_.md) | Observable<AppUpdater> | An [AppUpdater](./kibana-plugin-core-public.appupdater.md) observable that can be used to update the application [AppUpdatableFields](./kibana-plugin-core-public.appupdatablefields.md) at runtime. | - diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.navlinkstatus.md b/docs/development/core/public/kibana-plugin-core-public.appbase.navlinkstatus.md deleted file mode 100644 index decfb235b2858..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.navlinkstatus.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [navLinkStatus](./kibana-plugin-core-public.appbase.navlinkstatus.md) - -## AppBase.navLinkStatus property - -The initial status of the application's navLink. Defaulting to `visible` if `status` is `accessible` and `hidden` if status is `inaccessible` See [AppNavLinkStatus](./kibana-plugin-core-public.appnavlinkstatus.md) - -Signature: - -```typescript -navLinkStatus?: AppNavLinkStatus; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.order.md b/docs/development/core/public/kibana-plugin-core-public.appbase.order.md deleted file mode 100644 index 606a40e72d592..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.order.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [order](./kibana-plugin-core-public.appbase.order.md) - -## AppBase.order property - -An ordinal used to sort nav links relative to one another for display. - -Signature: - -```typescript -order?: number; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.status.md b/docs/development/core/public/kibana-plugin-core-public.appbase.status.md deleted file mode 100644 index 4d6ba6ebd955e..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.status.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [status](./kibana-plugin-core-public.appbase.status.md) - -## AppBase.status property - -The initial status of the application. 
Defaulting to `accessible` - -Signature: - -```typescript -status?: AppStatus; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.title.md b/docs/development/core/public/kibana-plugin-core-public.appbase.title.md deleted file mode 100644 index d6058badee8e8..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.title.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [title](./kibana-plugin-core-public.appbase.title.md) - -## AppBase.title property - -The title of the application. - -Signature: - -```typescript -title: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.tooltip.md b/docs/development/core/public/kibana-plugin-core-public.appbase.tooltip.md deleted file mode 100644 index 0c0b0840eb921..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.tooltip.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [tooltip](./kibana-plugin-core-public.appbase.tooltip.md) - -## AppBase.tooltip property - -A tooltip shown when hovering over app link. - -Signature: - -```typescript -tooltip?: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appbase.updater_.md b/docs/development/core/public/kibana-plugin-core-public.appbase.updater_.md deleted file mode 100644 index c2c572755f9b2..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.appbase.updater_.md +++ /dev/null @@ -1,44 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppBase](./kibana-plugin-core-public.appbase.md) > [updater$](./kibana-plugin-core-public.appbase.updater_.md) - -## AppBase.updater$ property - -An [AppUpdater](./kibana-plugin-core-public.appupdater.md) observable that can be used to update the application [AppUpdatableFields](./kibana-plugin-core-public.appupdatablefields.md) at runtime. 
- -Signature: - -```typescript -updater$?: Observable; -``` - -## Example - -How to update an application navLink at runtime - -```ts -// inside your plugin's setup function -export class MyPlugin implements Plugin { - private appUpdater = new BehaviorSubject(() => ({})); - - setup({ application }) { - application.register({ - id: 'my-app', - title: 'My App', - updater$: this.appUpdater, - async mount(params) { - const { renderApp } = await import('./application'); - return renderApp(params); - }, - }); - } - - start() { - // later, when the navlink needs to be updated - appUpdater.next(() => { - navLinkStatus: AppNavLinkStatus.disabled, - }) - } - -``` - diff --git a/docs/development/core/public/kibana-plugin-core-public.applicationstart.applications_.md b/docs/development/core/public/kibana-plugin-core-public.applicationstart.applications_.md index d428faa500faf..bcc5435f35951 100644 --- a/docs/development/core/public/kibana-plugin-core-public.applicationstart.applications_.md +++ b/docs/development/core/public/kibana-plugin-core-public.applicationstart.applications_.md @@ -9,7 +9,7 @@ Observable emitting the list of currently registered apps and their associated s Signature: ```typescript -applications$: Observable>; +applications$: Observable>; ``` ## Remarks diff --git a/docs/development/core/public/kibana-plugin-core-public.applicationstart.md b/docs/development/core/public/kibana-plugin-core-public.applicationstart.md index 896de2de32dd5..00318f32984e9 100644 --- a/docs/development/core/public/kibana-plugin-core-public.applicationstart.md +++ b/docs/development/core/public/kibana-plugin-core-public.applicationstart.md @@ -15,7 +15,7 @@ export interface ApplicationStart | Property | Type | Description | | --- | --- | --- | -| [applications$](./kibana-plugin-core-public.applicationstart.applications_.md) | Observable<ReadonlyMap<string, PublicAppInfo | PublicLegacyAppInfo>> | Observable emitting the list of currently registered apps and their associated status. | +| [applications$](./kibana-plugin-core-public.applicationstart.applications_.md) | Observable<ReadonlyMap<string, PublicAppInfo>> | Observable emitting the list of currently registered apps and their associated status. | | [capabilities](./kibana-plugin-core-public.applicationstart.capabilities.md) | RecursiveReadonly<Capabilities> | Gets the read-only capabilities. | | [currentAppId$](./kibana-plugin-core-public.applicationstart.currentappid_.md) | Observable<string | undefined> | An observable that emits the current application id and each subsequent id update. | diff --git a/docs/development/core/public/kibana-plugin-core-public.appmountparameters.md b/docs/development/core/public/kibana-plugin-core-public.appmountparameters.md index de79fc8281c45..f6c57603bedde 100644 --- a/docs/development/core/public/kibana-plugin-core-public.appmountparameters.md +++ b/docs/development/core/public/kibana-plugin-core-public.appmountparameters.md @@ -19,4 +19,5 @@ export interface AppMountParameters | [element](./kibana-plugin-core-public.appmountparameters.element.md) | HTMLElement | The container element to render the application into. | | [history](./kibana-plugin-core-public.appmountparameters.history.md) | ScopedHistory<HistoryLocationState> | A scoped history instance for your application. Should be used to wire up your applications Router. 
| | [onAppLeave](./kibana-plugin-core-public.appmountparameters.onappleave.md) | (handler: AppLeaveHandler) => void | A function that can be used to register a handler that will be called when the user is leaving the current application, allowing to prompt a confirmation message before actually changing the page.This will be called either when the user goes to another application, or when trying to close the tab or manually changing the url. | +| [setHeaderActionMenu](./kibana-plugin-core-public.appmountparameters.setheaderactionmenu.md) | (menuMount: MountPoint | undefined) => void | A function that can be used to set the mount point used to populate the application action container in the chrome header.Calling the handler multiple time will erase the current content of the action menu with the mount from the latest call. Calling the handler with undefined will unmount the current mount point. Calling the handler after the application has been unmounted will have no effect. | diff --git a/docs/development/core/public/kibana-plugin-core-public.appmountparameters.setheaderactionmenu.md b/docs/development/core/public/kibana-plugin-core-public.appmountparameters.setheaderactionmenu.md new file mode 100644 index 0000000000000..ca9cee64bb1f9 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.appmountparameters.setheaderactionmenu.md @@ -0,0 +1,39 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [AppMountParameters](./kibana-plugin-core-public.appmountparameters.md) > [setHeaderActionMenu](./kibana-plugin-core-public.appmountparameters.setheaderactionmenu.md) + +## AppMountParameters.setHeaderActionMenu property + +A function that can be used to set the mount point used to populate the application action container in the chrome header. + +Calling the handler multiple time will erase the current content of the action menu with the mount from the latest call. Calling the handler with `undefined` will unmount the current mount point. Calling the handler after the application has been unmounted will have no effect. 
+ +Signature: + +```typescript +setHeaderActionMenu: (menuMount: MountPoint | undefined) => void; +``` + +## Example + + +```ts +// application.tsx +import React from 'react'; +import ReactDOM from 'react-dom'; +import { BrowserRouter, Route } from 'react-router-dom'; + +import { CoreStart, AppMountParameters } from 'src/core/public'; +import { MyPluginDepsStart } from './plugin'; + +export renderApp = ({ element, history, setHeaderActionMenu }: AppMountParameters) => { + const { renderApp } = await import('./application'); + const { renderActionMenu } = await import('./action_menu'); + setHeaderActionMenu((element) => { + return renderActionMenu(element); + }) + return renderApp({ element, history }); +} + +``` + diff --git a/docs/development/core/public/kibana-plugin-core-public.appupdatablefields.md b/docs/development/core/public/kibana-plugin-core-public.appupdatablefields.md index 3d8b5d115c8a2..1232b7f940255 100644 --- a/docs/development/core/public/kibana-plugin-core-public.appupdatablefields.md +++ b/docs/development/core/public/kibana-plugin-core-public.appupdatablefields.md @@ -9,5 +9,5 @@ Defines the list of fields that can be updated via an [AppUpdater](./kibana-plug Signature: ```typescript -export declare type AppUpdatableFields = Pick; +export declare type AppUpdatableFields = Pick; ``` diff --git a/docs/development/core/public/kibana-plugin-core-public.appupdater.md b/docs/development/core/public/kibana-plugin-core-public.appupdater.md index a1c1424132da6..744c52f221da7 100644 --- a/docs/development/core/public/kibana-plugin-core-public.appupdater.md +++ b/docs/development/core/public/kibana-plugin-core-public.appupdater.md @@ -9,5 +9,5 @@ Updater for applications. see [ApplicationSetup](./kibana-plugin-core-public.app Signature: ```typescript -export declare type AppUpdater = (app: AppBase) => Partial | undefined; +export declare type AppUpdater = (app: App) => Partial | undefined; ``` diff --git a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.active.md b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.active.md deleted file mode 100644 index fb8a6eb691b42..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.active.md +++ /dev/null @@ -1,17 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [ChromeNavLink](./kibana-plugin-core-public.chromenavlink.md) > [active](./kibana-plugin-core-public.chromenavlink.active.md) - -## ChromeNavLink.active property - -> Warning: This API is now obsolete. -> -> - -Indicates whether or not this app is currently on the screen. - -Signature: - -```typescript -readonly active?: boolean; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.disabled.md b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.disabled.md index 9e1aefb79ad39..2b4d22be187f9 100644 --- a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.disabled.md +++ b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.disabled.md @@ -4,10 +4,6 @@ ## ChromeNavLink.disabled property -> Warning: This API is now obsolete. -> -> - Disables a link from being clickable. 
Signature: diff --git a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.disablesuburltracking.md b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.disablesuburltracking.md deleted file mode 100644 index 843fd959d262a..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.disablesuburltracking.md +++ /dev/null @@ -1,17 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [ChromeNavLink](./kibana-plugin-core-public.chromenavlink.md) > [disableSubUrlTracking](./kibana-plugin-core-public.chromenavlink.disablesuburltracking.md) - -## ChromeNavLink.disableSubUrlTracking property - -> Warning: This API is now obsolete. -> -> - -A flag that tells legacy chrome to ignore the link when tracking sub-urls - -Signature: - -```typescript -readonly disableSubUrlTracking?: boolean; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.href.md b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.href.md index a8af0c997ca78..f51fa7e5b1355 100644 --- a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.href.md +++ b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.href.md @@ -9,5 +9,5 @@ Settled state between `url`, `baseUrl`, and `active` Signature: ```typescript -readonly href?: string; +readonly href: string; ``` diff --git a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.linktolastsuburl.md b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.linktolastsuburl.md deleted file mode 100644 index 0b6d6ae129744..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.linktolastsuburl.md +++ /dev/null @@ -1,17 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [ChromeNavLink](./kibana-plugin-core-public.chromenavlink.md) > [linkToLastSubUrl](./kibana-plugin-core-public.chromenavlink.linktolastsuburl.md) - -## ChromeNavLink.linkToLastSubUrl property - -> Warning: This API is now obsolete. -> -> - -Whether or not the subUrl feature should be enabled. - -Signature: - -```typescript -readonly linkToLastSubUrl?: boolean; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.md b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.md index 0349e865bff97..dfe8f119505aa 100644 --- a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.md +++ b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.md @@ -15,20 +15,16 @@ export interface ChromeNavLink | Property | Type | Description | | --- | --- | --- | -| [active](./kibana-plugin-core-public.chromenavlink.active.md) | boolean | Indicates whether or not this app is currently on the screen. | | [baseUrl](./kibana-plugin-core-public.chromenavlink.baseurl.md) | string | The base route used to open the root of an application. | | [category](./kibana-plugin-core-public.chromenavlink.category.md) | AppCategory | The category the app lives in | | [disabled](./kibana-plugin-core-public.chromenavlink.disabled.md) | boolean | Disables a link from being clickable. 
| -| [disableSubUrlTracking](./kibana-plugin-core-public.chromenavlink.disablesuburltracking.md) | boolean | A flag that tells legacy chrome to ignore the link when tracking sub-urls | | [euiIconType](./kibana-plugin-core-public.chromenavlink.euiicontype.md) | string | A EUI iconType that will be used for the app's icon. This icon takes precedence over the icon property. | | [hidden](./kibana-plugin-core-public.chromenavlink.hidden.md) | boolean | Hides a link from the navigation. | | [href](./kibana-plugin-core-public.chromenavlink.href.md) | string | Settled state between url, baseUrl, and active | | [icon](./kibana-plugin-core-public.chromenavlink.icon.md) | string | A URL to an image file used as an icon. Used as a fallback if euiIconType is not provided. | | [id](./kibana-plugin-core-public.chromenavlink.id.md) | string | A unique identifier for looking up links. | -| [linkToLastSubUrl](./kibana-plugin-core-public.chromenavlink.linktolastsuburl.md) | boolean | Whether or not the subUrl feature should be enabled. | | [order](./kibana-plugin-core-public.chromenavlink.order.md) | number | An ordinal used to sort nav links relative to one another for display. | -| [subUrlBase](./kibana-plugin-core-public.chromenavlink.suburlbase.md) | string | A url base that legacy apps can set to match deep URLs to an application. | | [title](./kibana-plugin-core-public.chromenavlink.title.md) | string | The title of the application. | | [tooltip](./kibana-plugin-core-public.chromenavlink.tooltip.md) | string | A tooltip shown when hovering over an app link. | -| [url](./kibana-plugin-core-public.chromenavlink.url.md) | string | The route used to open the [default path](./kibana-plugin-core-public.appbase.defaultpath.md) of an application. If unset, baseUrl will be used instead. | +| [url](./kibana-plugin-core-public.chromenavlink.url.md) | string | The route used to open the [default path](./kibana-plugin-core-public.app.defaultpath.md) of an application. If unset, baseUrl will be used instead. | diff --git a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.suburlbase.md b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.suburlbase.md deleted file mode 100644 index 047a1d83b137f..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.suburlbase.md +++ /dev/null @@ -1,17 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [ChromeNavLink](./kibana-plugin-core-public.chromenavlink.md) > [subUrlBase](./kibana-plugin-core-public.chromenavlink.suburlbase.md) - -## ChromeNavLink.subUrlBase property - -A url base that legacy apps can set to match deep URLs to an application. - -Signature: - -```typescript -readonly subUrlBase?: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.url.md b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.url.md index 1e0b890015993..833930c494786 100644 --- a/docs/development/core/public/kibana-plugin-core-public.chromenavlink.url.md +++ b/docs/development/core/public/kibana-plugin-core-public.chromenavlink.url.md @@ -4,7 +4,7 @@ ## ChromeNavLink.url property -The route used to open the [default path](./kibana-plugin-core-public.appbase.defaultpath.md) of an application. If unset, `baseUrl` will be used instead. +The route used to open the [default path](./kibana-plugin-core-public.app.defaultpath.md) of an application. If unset, `baseUrl` will be used instead.
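As an illustrative aside (assuming a `CoreStart` instance in scope and a hypothetical app id), a plugin could resolve the link target described above by preferring `url` and falling back to `baseUrl`:

```ts
import { CoreStart } from 'src/core/public';

export const logNavLinkTarget = (core: CoreStart) => {
  // 'myApp' is a hypothetical app id; get() returns undefined for unknown ids.
  const navLink = core.chrome.navLinks.get('myApp');
  if (navLink) {
    // Prefer the default-path route when set, otherwise fall back to the app root.
    const target = navLink.url || navLink.baseUrl;
    console.log(`"${navLink.title}" opens at ${target}`);
  }
};
```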
Signature: diff --git a/docs/development/core/public/kibana-plugin-core-public.chromenavlinks.update.md b/docs/development/core/public/kibana-plugin-core-public.chromenavlinks.update.md index 5741a4c98f895..7948f2f8543fd 100644 --- a/docs/development/core/public/kibana-plugin-core-public.chromenavlinks.update.md +++ b/docs/development/core/public/kibana-plugin-core-public.chromenavlinks.update.md @@ -6,7 +6,7 @@ > Warning: This API is now obsolete. > -> Uses the [AppBase.updater$](./kibana-plugin-core-public.appbase.updater_.md) property when registering your application with [ApplicationSetup.register()](./kibana-plugin-core-public.applicationsetup.register.md) instead. +> Uses the property when registering your application with [ApplicationSetup.register()](./kibana-plugin-core-public.applicationsetup.register.md) instead. > Update the navlink for the given id with the updated attributes. Returns the updated navlink or `undefined` if it does not exist. diff --git a/docs/development/core/public/kibana-plugin-core-public.chromenavlinkupdateablefields.md b/docs/development/core/public/kibana-plugin-core-public.chromenavlinkupdateablefields.md index bd5a1399cded7..0445bb28bb355 100644 --- a/docs/development/core/public/kibana-plugin-core-public.chromenavlinkupdateablefields.md +++ b/docs/development/core/public/kibana-plugin-core-public.chromenavlinkupdateablefields.md @@ -8,5 +8,5 @@ Signature: ```typescript -export declare type ChromeNavLinkUpdateableFields = Partial>; +export declare type ChromeNavLinkUpdateableFields = Partial>; ``` diff --git a/docs/development/core/public/kibana-plugin-core-public.coresetup.injectedmetadata.md b/docs/development/core/public/kibana-plugin-core-public.coresetup.injectedmetadata.md index b8f2699b677b0..8c845c621e0d7 100644 --- a/docs/development/core/public/kibana-plugin-core-public.coresetup.injectedmetadata.md +++ b/docs/development/core/public/kibana-plugin-core-public.coresetup.injectedmetadata.md @@ -8,7 +8,7 @@ > > -exposed temporarily until https://github.com/elastic/kibana/issues/41990 done use \*only\* to retrieve config values. There is no way to set injected values in the new platform. Use the legacy platform API instead. +exposed temporarily until https://github.com/elastic/kibana/issues/41990 done use \*only\* to retrieve config values. There is no way to set injected values in the new platform. Signature: diff --git a/docs/development/core/public/kibana-plugin-core-public.coresetup.md b/docs/development/core/public/kibana-plugin-core-public.coresetup.md index 870fa33dce900..b9f97b83af88f 100644 --- a/docs/development/core/public/kibana-plugin-core-public.coresetup.md +++ b/docs/development/core/public/kibana-plugin-core-public.coresetup.md @@ -21,7 +21,7 @@ export interface CoreSetupFatalErrorsSetup | [FatalErrorsSetup](./kibana-plugin-core-public.fatalerrorssetup.md) | | [getStartServices](./kibana-plugin-core-public.coresetup.getstartservices.md) | StartServicesAccessor<TPluginsStart, TStart> | [StartServicesAccessor](./kibana-plugin-core-public.startservicesaccessor.md) | | [http](./kibana-plugin-core-public.coresetup.http.md) | HttpSetup | [HttpSetup](./kibana-plugin-core-public.httpsetup.md) | -| [injectedMetadata](./kibana-plugin-core-public.coresetup.injectedmetadata.md) | {
getInjectedVar: (name: string, defaultValue?: any) => unknown;
} | exposed temporarily until https://github.com/elastic/kibana/issues/41990 done use \*only\* to retrieve config values. There is no way to set injected values in the new platform. Use the legacy platform API instead. | +| [injectedMetadata](./kibana-plugin-core-public.coresetup.injectedmetadata.md) | {
getInjectedVar: (name: string, defaultValue?: any) => unknown;
} | exposed temporarily until https://github.com/elastic/kibana/issues/41990 done use \*only\* to retrieve config values. There is no way to set injected values in the new platform. | | [notifications](./kibana-plugin-core-public.coresetup.notifications.md) | NotificationsSetup | [NotificationsSetup](./kibana-plugin-core-public.notificationssetup.md) | | [uiSettings](./kibana-plugin-core-public.coresetup.uisettings.md) | IUiSettingsClient | [IUiSettingsClient](./kibana-plugin-core-public.iuisettingsclient.md) | diff --git a/docs/development/core/public/kibana-plugin-core-public.corestart.injectedmetadata.md b/docs/development/core/public/kibana-plugin-core-public.corestart.injectedmetadata.md index 45f9349ae8c61..4e9bf7c4bc0d5 100644 --- a/docs/development/core/public/kibana-plugin-core-public.corestart.injectedmetadata.md +++ b/docs/development/core/public/kibana-plugin-core-public.corestart.injectedmetadata.md @@ -8,7 +8,7 @@ > > -exposed temporarily until https://github.com/elastic/kibana/issues/41990 done use \*only\* to retrieve config values. There is no way to set injected values in the new platform. Use the legacy platform API instead. +exposed temporarily until https://github.com/elastic/kibana/issues/41990 done use \*only\* to retrieve config values. There is no way to set injected values in the new platform. Signature: diff --git a/docs/development/core/public/kibana-plugin-core-public.corestart.md b/docs/development/core/public/kibana-plugin-core-public.corestart.md index cb4a825a825b1..a7b45b318d2c9 100644 --- a/docs/development/core/public/kibana-plugin-core-public.corestart.md +++ b/docs/development/core/public/kibana-plugin-core-public.corestart.md @@ -22,7 +22,7 @@ export interface CoreStart | [fatalErrors](./kibana-plugin-core-public.corestart.fatalerrors.md) | FatalErrorsStart | [FatalErrorsStart](./kibana-plugin-core-public.fatalerrorsstart.md) | | [http](./kibana-plugin-core-public.corestart.http.md) | HttpStart | [HttpStart](./kibana-plugin-core-public.httpstart.md) | | [i18n](./kibana-plugin-core-public.corestart.i18n.md) | I18nStart | [I18nStart](./kibana-plugin-core-public.i18nstart.md) | -| [injectedMetadata](./kibana-plugin-core-public.corestart.injectedmetadata.md) | {
getInjectedVar: (name: string, defaultValue?: any) => unknown;
} | exposed temporarily until https://github.com/elastic/kibana/issues/41990 done use \*only\* to retrieve config values. There is no way to set injected values in the new platform. Use the legacy platform API instead. | +| [injectedMetadata](./kibana-plugin-core-public.corestart.injectedmetadata.md) | {
getInjectedVar: (name: string, defaultValue?: any) => unknown;
} | exposed temporarily until https://github.com/elastic/kibana/issues/41990 done use \*only\* to retrieve config values. There is no way to set injected values in the new platform. | | [notifications](./kibana-plugin-core-public.corestart.notifications.md) | NotificationsStart | [NotificationsStart](./kibana-plugin-core-public.notificationsstart.md) | | [overlays](./kibana-plugin-core-public.corestart.overlays.md) | OverlayStart | [OverlayStart](./kibana-plugin-core-public.overlaystart.md) | | [savedObjects](./kibana-plugin-core-public.corestart.savedobjects.md) | SavedObjectsStart | [SavedObjectsStart](./kibana-plugin-core-public.savedobjectsstart.md) | diff --git a/docs/development/core/public/kibana-plugin-core-public.legacyapp.appurl.md b/docs/development/core/public/kibana-plugin-core-public.legacyapp.appurl.md deleted file mode 100644 index 292bf29962839..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacyapp.appurl.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyApp](./kibana-plugin-core-public.legacyapp.md) > [appUrl](./kibana-plugin-core-public.legacyapp.appurl.md) - -## LegacyApp.appUrl property - -Signature: - -```typescript -appUrl: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacyapp.disablesuburltracking.md b/docs/development/core/public/kibana-plugin-core-public.legacyapp.disablesuburltracking.md deleted file mode 100644 index af4d0eb7969d3..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacyapp.disablesuburltracking.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyApp](./kibana-plugin-core-public.legacyapp.md) > [disableSubUrlTracking](./kibana-plugin-core-public.legacyapp.disablesuburltracking.md) - -## LegacyApp.disableSubUrlTracking property - -Signature: - -```typescript -disableSubUrlTracking?: boolean; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacyapp.linktolastsuburl.md b/docs/development/core/public/kibana-plugin-core-public.legacyapp.linktolastsuburl.md deleted file mode 100644 index fa1314b74fd83..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacyapp.linktolastsuburl.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyApp](./kibana-plugin-core-public.legacyapp.md) > [linkToLastSubUrl](./kibana-plugin-core-public.legacyapp.linktolastsuburl.md) - -## LegacyApp.linkToLastSubUrl property - -Signature: - -```typescript -linkToLastSubUrl?: boolean; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacyapp.md b/docs/development/core/public/kibana-plugin-core-public.legacyapp.md deleted file mode 100644 index 06533aaa99170..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacyapp.md +++ /dev/null @@ -1,22 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyApp](./kibana-plugin-core-public.legacyapp.md) - -## LegacyApp interface - - -Signature: - -```typescript -export interface LegacyApp extends AppBase -``` - -## Properties - -| Property | Type | Description | -| --- | --- | --- | -| [appUrl](./kibana-plugin-core-public.legacyapp.appurl.md) | string | | -| [disableSubUrlTracking](./kibana-plugin-core-public.legacyapp.disablesuburltracking.md) | boolean | | -| 
[linkToLastSubUrl](./kibana-plugin-core-public.legacyapp.linktolastsuburl.md) | boolean | | -| [subUrlBase](./kibana-plugin-core-public.legacyapp.suburlbase.md) | string | | - diff --git a/docs/development/core/public/kibana-plugin-core-public.legacyapp.suburlbase.md b/docs/development/core/public/kibana-plugin-core-public.legacyapp.suburlbase.md deleted file mode 100644 index 44a1e52ccd244..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacyapp.suburlbase.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyApp](./kibana-plugin-core-public.legacyapp.md) > [subUrlBase](./kibana-plugin-core-public.legacyapp.suburlbase.md) - -## LegacyApp.subUrlBase property - -Signature: - -```typescript -subUrlBase?: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacycoresetup.injectedmetadata.md b/docs/development/core/public/kibana-plugin-core-public.legacycoresetup.injectedmetadata.md deleted file mode 100644 index 4014d27907e98..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacycoresetup.injectedmetadata.md +++ /dev/null @@ -1,15 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyCoreSetup](./kibana-plugin-core-public.legacycoresetup.md) > [injectedMetadata](./kibana-plugin-core-public.legacycoresetup.injectedmetadata.md) - -## LegacyCoreSetup.injectedMetadata property - -> Warning: This API is now obsolete. -> -> - -Signature: - -```typescript -injectedMetadata: InjectedMetadataSetup; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacycoresetup.md b/docs/development/core/public/kibana-plugin-core-public.legacycoresetup.md deleted file mode 100644 index 26220accbfaf3..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacycoresetup.md +++ /dev/null @@ -1,28 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyCoreSetup](./kibana-plugin-core-public.legacycoresetup.md) - -## LegacyCoreSetup interface - -> Warning: This API is now obsolete. -> -> - -Setup interface exposed to the legacy platform via the `ui/new_platform` module. - -Signature: - -```typescript -export interface LegacyCoreSetup extends CoreSetup -``` - -## Remarks - -Some methods are not supported in the legacy platform and while present to make this type compatibile with [CoreSetup](./kibana-plugin-core-public.coresetup.md), unsupported methods will throw exceptions when called. - -## Properties - -| Property | Type | Description | -| --- | --- | --- | -| [injectedMetadata](./kibana-plugin-core-public.legacycoresetup.injectedmetadata.md) | InjectedMetadataSetup | | - diff --git a/docs/development/core/public/kibana-plugin-core-public.legacycorestart.injectedmetadata.md b/docs/development/core/public/kibana-plugin-core-public.legacycorestart.injectedmetadata.md deleted file mode 100644 index 288b288b1814d..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacycorestart.injectedmetadata.md +++ /dev/null @@ -1,15 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyCoreStart](./kibana-plugin-core-public.legacycorestart.md) > [injectedMetadata](./kibana-plugin-core-public.legacycorestart.injectedmetadata.md) - -## LegacyCoreStart.injectedMetadata property - -> Warning: This API is now obsolete. 
-> -> - -Signature: - -```typescript -injectedMetadata: InjectedMetadataStart; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacycorestart.md b/docs/development/core/public/kibana-plugin-core-public.legacycorestart.md deleted file mode 100644 index 7714d0f325d2c..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacycorestart.md +++ /dev/null @@ -1,28 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyCoreStart](./kibana-plugin-core-public.legacycorestart.md) - -## LegacyCoreStart interface - -> Warning: This API is now obsolete. -> -> - -Start interface exposed to the legacy platform via the `ui/new_platform` module. - -Signature: - -```typescript -export interface LegacyCoreStart extends CoreStart -``` - -## Remarks - -Some methods are not supported in the legacy platform and while present to make this type compatibile with [CoreStart](./kibana-plugin-core-public.corestart.md), unsupported methods will throw exceptions when called. - -## Properties - -| Property | Type | Description | -| --- | --- | --- | -| [injectedMetadata](./kibana-plugin-core-public.legacycorestart.injectedmetadata.md) | InjectedMetadataStart | | - diff --git a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.category.md b/docs/development/core/public/kibana-plugin-core-public.legacynavlink.category.md deleted file mode 100644 index a70aac70067de..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.category.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyNavLink](./kibana-plugin-core-public.legacynavlink.md) > [category](./kibana-plugin-core-public.legacynavlink.category.md) - -## LegacyNavLink.category property - -Signature: - -```typescript -category?: AppCategory; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.euiicontype.md b/docs/development/core/public/kibana-plugin-core-public.legacynavlink.euiicontype.md deleted file mode 100644 index b360578f98cf1..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.euiicontype.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyNavLink](./kibana-plugin-core-public.legacynavlink.md) > [euiIconType](./kibana-plugin-core-public.legacynavlink.euiicontype.md) - -## LegacyNavLink.euiIconType property - -Signature: - -```typescript -euiIconType?: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.icon.md b/docs/development/core/public/kibana-plugin-core-public.legacynavlink.icon.md deleted file mode 100644 index c2c6f89be0d78..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.icon.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyNavLink](./kibana-plugin-core-public.legacynavlink.md) > [icon](./kibana-plugin-core-public.legacynavlink.icon.md) - -## LegacyNavLink.icon property - -Signature: - -```typescript -icon?: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.id.md b/docs/development/core/public/kibana-plugin-core-public.legacynavlink.id.md deleted file mode 100644 index fc79b6b4bd6dd..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.id.md 
+++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyNavLink](./kibana-plugin-core-public.legacynavlink.md) > [id](./kibana-plugin-core-public.legacynavlink.id.md) - -## LegacyNavLink.id property - -Signature: - -```typescript -id: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.md b/docs/development/core/public/kibana-plugin-core-public.legacynavlink.md deleted file mode 100644 index b6402f991f965..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.md +++ /dev/null @@ -1,25 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyNavLink](./kibana-plugin-core-public.legacynavlink.md) - -## LegacyNavLink interface - - -Signature: - -```typescript -export interface LegacyNavLink -``` - -## Properties - -| Property | Type | Description | -| --- | --- | --- | -| [category](./kibana-plugin-core-public.legacynavlink.category.md) | AppCategory | | -| [euiIconType](./kibana-plugin-core-public.legacynavlink.euiicontype.md) | string | | -| [icon](./kibana-plugin-core-public.legacynavlink.icon.md) | string | | -| [id](./kibana-plugin-core-public.legacynavlink.id.md) | string | | -| [order](./kibana-plugin-core-public.legacynavlink.order.md) | number | | -| [title](./kibana-plugin-core-public.legacynavlink.title.md) | string | | -| [url](./kibana-plugin-core-public.legacynavlink.url.md) | string | | - diff --git a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.order.md b/docs/development/core/public/kibana-plugin-core-public.legacynavlink.order.md deleted file mode 100644 index 6ad3081b81d4b..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.order.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyNavLink](./kibana-plugin-core-public.legacynavlink.md) > [order](./kibana-plugin-core-public.legacynavlink.order.md) - -## LegacyNavLink.order property - -Signature: - -```typescript -order: number; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.title.md b/docs/development/core/public/kibana-plugin-core-public.legacynavlink.title.md deleted file mode 100644 index 70b0e37729f26..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.title.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyNavLink](./kibana-plugin-core-public.legacynavlink.md) > [title](./kibana-plugin-core-public.legacynavlink.title.md) - -## LegacyNavLink.title property - -Signature: - -```typescript -title: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.url.md b/docs/development/core/public/kibana-plugin-core-public.legacynavlink.url.md deleted file mode 100644 index 7e543f4a90c1d..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.legacynavlink.url.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [LegacyNavLink](./kibana-plugin-core-public.legacynavlink.md) > [url](./kibana-plugin-core-public.legacynavlink.url.md) - -## LegacyNavLink.url property - -Signature: - -```typescript -url: string; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.md 
b/docs/development/core/public/kibana-plugin-core-public.md index c931ce544f5d5..08b12190ef638 100644 --- a/docs/development/core/public/kibana-plugin-core-public.md +++ b/docs/development/core/public/kibana-plugin-core-public.md @@ -41,8 +41,7 @@ The plugin integrates with the core system via lifecycle events: `setup` | Interface | Description | | --- | --- | -| [App](./kibana-plugin-core-public.app.md) | Extension of [common app properties](./kibana-plugin-core-public.appbase.md) with the mount function. | -| [AppBase](./kibana-plugin-core-public.appbase.md) | | +| [App](./kibana-plugin-core-public.app.md) | | | [AppCategory](./kibana-plugin-core-public.appcategory.md) | A category definition for nav links to know where to sort them in the left hand nav | | [AppLeaveConfirmAction](./kibana-plugin-core-public.appleaveconfirmaction.md) | Action to return from a [AppLeaveHandler](./kibana-plugin-core-public.appleavehandler.md) to show a confirmation message when trying to leave an application.See | | [AppLeaveDefaultAction](./kibana-plugin-core-public.appleavedefaultaction.md) | Action to return from a [AppLeaveHandler](./kibana-plugin-core-public.appleavehandler.md) to execute the default behaviour when leaving the application.See | @@ -90,10 +89,6 @@ The plugin integrates with the core system via lifecycle events: `setup` | [IHttpResponseInterceptorOverrides](./kibana-plugin-core-public.ihttpresponseinterceptoroverrides.md) | Properties that can be returned by HttpInterceptor.request to override the response. | | [ImageValidation](./kibana-plugin-core-public.imagevalidation.md) | | | [IUiSettingsClient](./kibana-plugin-core-public.iuisettingsclient.md) | Client-side client that provides access to the advanced settings stored in elasticsearch. The settings provide control over the behavior of the Kibana application. For example, a user can specify how to display numeric or date fields. Users can adjust the settings via Management UI. [IUiSettingsClient](./kibana-plugin-core-public.iuisettingsclient.md) | -| [LegacyApp](./kibana-plugin-core-public.legacyapp.md) | | -| [LegacyCoreSetup](./kibana-plugin-core-public.legacycoresetup.md) | Setup interface exposed to the legacy platform via the ui/new_platform module. | -| [LegacyCoreStart](./kibana-plugin-core-public.legacycorestart.md) | Start interface exposed to the legacy platform via the ui/new_platform module. | -| [LegacyNavLink](./kibana-plugin-core-public.legacynavlink.md) | | | [NavigateToAppOptions](./kibana-plugin-core-public.navigatetoappoptions.md) | Options for the [navigateToApp API](./kibana-plugin-core-public.applicationstart.navigatetoapp.md) | | [NotificationsSetup](./kibana-plugin-core-public.notificationssetup.md) | | | [NotificationsStart](./kibana-plugin-core-public.notificationsstart.md) | | @@ -173,7 +168,6 @@ The plugin integrates with the core system via lifecycle events: `setup` | [PluginInitializer](./kibana-plugin-core-public.plugininitializer.md) | The plugin export at the root of a plugin's public directory should conform to this interface. 
| | [PluginOpaqueId](./kibana-plugin-core-public.pluginopaqueid.md) | | | [PublicAppInfo](./kibana-plugin-core-public.publicappinfo.md) | Public information about a registered [application](./kibana-plugin-core-public.app.md) | -| [PublicLegacyAppInfo](./kibana-plugin-core-public.publiclegacyappinfo.md) | Information about a registered [legacy application](./kibana-plugin-core-public.legacyapp.md) | | [PublicUiSettingsParams](./kibana-plugin-core-public.publicuisettingsparams.md) | A sub-set of [UiSettingsParams](./kibana-plugin-core-public.uisettingsparams.md) exposed to the client-side. | | [SavedObjectAttribute](./kibana-plugin-core-public.savedobjectattribute.md) | Type definition for a Saved Object attribute value | | [SavedObjectAttributeSingle](./kibana-plugin-core-public.savedobjectattributesingle.md) | Don't use this type, it's simply a helper type for [SavedObjectAttribute](./kibana-plugin-core-public.savedobjectattribute.md) | diff --git a/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.md b/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.md index aa51e5706e3d7..b7c01fae4314f 100644 --- a/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.md +++ b/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.md @@ -16,7 +16,7 @@ export interface NavigateToAppOptions | Property | Type | Description | | --- | --- | --- | -| [path](./kibana-plugin-core-public.navigatetoappoptions.path.md) | string | optional path inside application to deep link to. If undefined, will use [the app's default path](./kibana-plugin-core-public.appbase.defaultpath.md)\` as default. | +| [path](./kibana-plugin-core-public.navigatetoappoptions.path.md) | string | optional path inside application to deep link to. If undefined, will use [the app's default path](./kibana-plugin-core-public.app.defaultpath.md)\` as default. | | [replace](./kibana-plugin-core-public.navigatetoappoptions.replace.md) | boolean | if true, will not create a new history entry when navigating (using replace instead of push) | | [state](./kibana-plugin-core-public.navigatetoappoptions.state.md) | unknown | optional state to forward to the application | diff --git a/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.path.md b/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.path.md index 58ce7e02d8dd8..095553d05778c 100644 --- a/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.path.md +++ b/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.path.md @@ -4,7 +4,7 @@ ## NavigateToAppOptions.path property -optional path inside application to deep link to. If undefined, will use [the app's default path](./kibana-plugin-core-public.appbase.defaultpath.md)\` as default. +optional path inside application to deep link to. If undefined, will use [the app's default path](./kibana-plugin-core-public.app.defaultpath.md)\` as default. 
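To make the option concrete, here is a hedged editorial sketch (the app id, path, and state values are illustrative only) of deep-linking with `ApplicationStart.navigateToApp`, which accepts the `NavigateToAppOptions` documented here:

```ts
import { CoreStart } from 'src/core/public';

export const goToSavedSearch = async (core: CoreStart) => {
  await core.application.navigateToApp('discover', {
    // deep link inside the app; falls back to the app's default path when omitted
    path: '#/view/some-saved-search',
    // push a new history entry rather than replacing the current one
    replace: false,
    // arbitrary state forwarded to the target application
    state: { fromPlugin: 'myPlugin' },
  });
};
```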
Signature: diff --git a/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.replace.md b/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.replace.md index 9530d03486299..8a7440025aedc 100644 --- a/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.replace.md +++ b/docs/development/core/public/kibana-plugin-core-public.navigatetoappoptions.replace.md @@ -11,8 +11,3 @@ if true, will not create a new history entry when navigating (using `replace` in ```typescript replace?: boolean; ``` - -## Remarks - -This option not be used when navigating from and/or to legacy applications. - diff --git a/docs/development/core/public/kibana-plugin-core-public.publicappinfo.md b/docs/development/core/public/kibana-plugin-core-public.publicappinfo.md index 4b3b103c92731..3717dc847db25 100644 --- a/docs/development/core/public/kibana-plugin-core-public.publicappinfo.md +++ b/docs/development/core/public/kibana-plugin-core-public.publicappinfo.md @@ -10,7 +10,6 @@ Public information about a registered [application](./kibana-plugin-core-public. ```typescript export declare type PublicAppInfo = Omit & { - legacy: false; status: AppStatus; navLinkStatus: AppNavLinkStatus; appRoute: string; diff --git a/docs/development/core/public/kibana-plugin-core-public.publiclegacyappinfo.md b/docs/development/core/public/kibana-plugin-core-public.publiclegacyappinfo.md deleted file mode 100644 index 051638daabd12..0000000000000 --- a/docs/development/core/public/kibana-plugin-core-public.publiclegacyappinfo.md +++ /dev/null @@ -1,17 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [PublicLegacyAppInfo](./kibana-plugin-core-public.publiclegacyappinfo.md) - -## PublicLegacyAppInfo type - -Information about a registered [legacy application](./kibana-plugin-core-public.legacyapp.md) - -Signature: - -```typescript -export declare type PublicLegacyAppInfo = Omit & { - legacy: true; - status: AppStatus; - navLinkStatus: AppNavLinkStatus; -}; -``` diff --git a/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindoptions.filter.md b/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindoptions.filter.md index 900f8e333f337..2c20fe2dab00f 100644 --- a/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindoptions.filter.md +++ b/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindoptions.filter.md @@ -7,5 +7,5 @@ Signature: ```typescript -filter?: string; +filter?: string | KueryNode; ``` diff --git a/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindoptions.md b/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindoptions.md index ebd0a99531755..903462ac3039d 100644 --- a/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindoptions.md +++ b/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindoptions.md @@ -17,7 +17,7 @@ export interface SavedObjectsFindOptions | --- | --- | --- | | [defaultSearchOperator](./kibana-plugin-core-public.savedobjectsfindoptions.defaultsearchoperator.md) | 'AND' | 'OR' | | | [fields](./kibana-plugin-core-public.savedobjectsfindoptions.fields.md) | string[] | An array of fields to include in the results | -| [filter](./kibana-plugin-core-public.savedobjectsfindoptions.filter.md) | string | | +| [filter](./kibana-plugin-core-public.savedobjectsfindoptions.filter.md) | string | KueryNode | | | 
[hasReference](./kibana-plugin-core-public.savedobjectsfindoptions.hasreference.md) | {
type: string;
id: string;
} | | | [namespaces](./kibana-plugin-core-public.savedobjectsfindoptions.namespaces.md) | string[] | | | [page](./kibana-plugin-core-public.savedobjectsfindoptions.page.md) | number | | diff --git a/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md b/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md index 9c70e658014b3..0838572f26f49 100644 --- a/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md +++ b/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md @@ -8,5 +8,5 @@ Signature: ```typescript -export declare type AppenderConfigType = TypeOf; +export declare type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig; ``` diff --git a/docs/development/core/server/kibana-plugin-core-server.md b/docs/development/core/server/kibana-plugin-core-server.md index 89330d2a86f76..dfffdffb08a08 100644 --- a/docs/development/core/server/kibana-plugin-core-server.md +++ b/docs/development/core/server/kibana-plugin-core-server.md @@ -123,7 +123,7 @@ The plugin integrates with the core system via lifecycle events: `setup` | [LoggerFactory](./kibana-plugin-core-server.loggerfactory.md) | The single purpose of LoggerFactory interface is to define a way to retrieve a context-based logger instance. | | [LoggingServiceSetup](./kibana-plugin-core-server.loggingservicesetup.md) | Provides APIs to plugins for customizing the plugin's logger. | | [LogMeta](./kibana-plugin-core-server.logmeta.md) | Contextual metadata | -| [MetricsServiceSetup](./kibana-plugin-core-server.metricsservicesetup.md) | | +| [MetricsServiceSetup](./kibana-plugin-core-server.metricsservicesetup.md) | APIs to retrieves metrics gathered and exposed by the core platform. | | [NodesVersionCompatibility](./kibana-plugin-core-server.nodesversioncompatibility.md) | | | [OnPostAuthToolkit](./kibana-plugin-core-server.onpostauthtoolkit.md) | A tool set defining an outcome of OnPostAuth interceptor for incoming request. | | [OnPreAuthToolkit](./kibana-plugin-core-server.onpreauthtoolkit.md) | A tool set defining an outcome of OnPreAuth interceptor for incoming request. 
| diff --git a/docs/development/core/server/kibana-plugin-core-server.metricsservicesetup.collectioninterval.md b/docs/development/core/server/kibana-plugin-core-server.metricsservicesetup.collectioninterval.md new file mode 100644 index 0000000000000..6f05526b66c83 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.metricsservicesetup.collectioninterval.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [MetricsServiceSetup](./kibana-plugin-core-server.metricsservicesetup.md) > [collectionInterval](./kibana-plugin-core-server.metricsservicesetup.collectioninterval.md) + +## MetricsServiceSetup.collectionInterval property + +Interval metrics are collected in milliseconds + +Signature: + +```typescript +readonly collectionInterval: number; +``` diff --git a/docs/development/core/server/kibana-plugin-core-server.metricsservicesetup.getopsmetrics_.md b/docs/development/core/server/kibana-plugin-core-server.metricsservicesetup.getopsmetrics_.md new file mode 100644 index 0000000000000..61107fbf20ad9 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.metricsservicesetup.getopsmetrics_.md @@ -0,0 +1,24 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [MetricsServiceSetup](./kibana-plugin-core-server.metricsservicesetup.md) > [getOpsMetrics$](./kibana-plugin-core-server.metricsservicesetup.getopsmetrics_.md) + +## MetricsServiceSetup.getOpsMetrics$ property + +Retrieve an observable emitting the [OpsMetrics](./kibana-plugin-core-server.opsmetrics.md) gathered. The observable will emit an initial value during core's `start` phase, and a new value every fixed interval of time, based on the `opts.interval` configuration property. + +Signature: + +```typescript +getOpsMetrics$: () => Observable; +``` + +## Example + + +```ts +core.metrics.getOpsMetrics$().subscribe(metrics => { + // do something with the metrics +}) + +``` + diff --git a/docs/development/core/server/kibana-plugin-core-server.metricsservicesetup.md b/docs/development/core/server/kibana-plugin-core-server.metricsservicesetup.md index 0bec919797b6f..5fcb1417dea0e 100644 --- a/docs/development/core/server/kibana-plugin-core-server.metricsservicesetup.md +++ b/docs/development/core/server/kibana-plugin-core-server.metricsservicesetup.md @@ -4,8 +4,18 @@ ## MetricsServiceSetup interface +APIs to retrieves metrics gathered and exposed by the core platform. + Signature: ```typescript export interface MetricsServiceSetup ``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [collectionInterval](./kibana-plugin-core-server.metricsservicesetup.collectioninterval.md) | number | Interval metrics are collected in milliseconds | +| [getOpsMetrics$](./kibana-plugin-core-server.metricsservicesetup.getopsmetrics_.md) | () => Observable<OpsMetrics> | Retrieve an observable emitting the [OpsMetrics](./kibana-plugin-core-server.opsmetrics.md) gathered. The observable will emit an initial value during core's start phase, and a new value every fixed interval of time, based on the opts.interval configuration property. 
| + diff --git a/docs/development/core/server/kibana-plugin-core-server.opsmetrics.collected_at.md b/docs/development/core/server/kibana-plugin-core-server.opsmetrics.collected_at.md new file mode 100644 index 0000000000000..25125569b7b38 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.opsmetrics.collected_at.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [OpsMetrics](./kibana-plugin-core-server.opsmetrics.md) > [collected\_at](./kibana-plugin-core-server.opsmetrics.collected_at.md) + +## OpsMetrics.collected\_at property + +Time metrics were recorded at. + +Signature: + +```typescript +collected_at: Date; +``` diff --git a/docs/development/core/server/kibana-plugin-core-server.opsmetrics.md b/docs/development/core/server/kibana-plugin-core-server.opsmetrics.md index d2d4782385c06..9803c0fbd53cc 100644 --- a/docs/development/core/server/kibana-plugin-core-server.opsmetrics.md +++ b/docs/development/core/server/kibana-plugin-core-server.opsmetrics.md @@ -16,6 +16,7 @@ export interface OpsMetrics | Property | Type | Description | | --- | --- | --- | +| [collected\_at](./kibana-plugin-core-server.opsmetrics.collected_at.md) | Date | Time metrics were recorded at. | | [concurrent\_connections](./kibana-plugin-core-server.opsmetrics.concurrent_connections.md) | OpsServerMetrics['concurrent_connections'] | number of current concurrent connections to the server | | [os](./kibana-plugin-core-server.opsmetrics.os.md) | OpsOsMetrics | OS related metrics | | [process](./kibana-plugin-core-server.opsmetrics.process.md) | OpsProcessMetrics | Process related metrics | diff --git a/docs/development/core/server/kibana-plugin-core-server.opsosmetrics.cpu.md b/docs/development/core/server/kibana-plugin-core-server.opsosmetrics.cpu.md new file mode 100644 index 0000000000000..095c45266a251 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.opsosmetrics.cpu.md @@ -0,0 +1,22 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [OpsOsMetrics](./kibana-plugin-core-server.opsosmetrics.md) > [cpu](./kibana-plugin-core-server.opsosmetrics.cpu.md) + +## OpsOsMetrics.cpu property + +cpu cgroup metrics, undefined when not running in a cgroup + +Signature: + +```typescript +cpu?: { + control_group: string; + cfs_period_micros: number; + cfs_quota_micros: number; + stat: { + number_of_elapsed_periods: number; + number_of_times_throttled: number; + time_throttled_nanos: number; + }; + }; +``` diff --git a/docs/development/core/server/kibana-plugin-core-server.opsosmetrics.cpuacct.md b/docs/development/core/server/kibana-plugin-core-server.opsosmetrics.cpuacct.md new file mode 100644 index 0000000000000..140646a0d1a35 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.opsosmetrics.cpuacct.md @@ -0,0 +1,16 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [OpsOsMetrics](./kibana-plugin-core-server.opsosmetrics.md) > [cpuacct](./kibana-plugin-core-server.opsosmetrics.cpuacct.md) + +## OpsOsMetrics.cpuacct property + +cpu accounting metrics, undefined when not running in a cgroup + +Signature: + +```typescript +cpuacct?: { + control_group: string; + usage_nanos: number; + }; +``` diff --git a/docs/development/core/server/kibana-plugin-core-server.opsosmetrics.md b/docs/development/core/server/kibana-plugin-core-server.opsosmetrics.md index 5fedb76a9c8d7..8938608531139 100644 --- 
a/docs/development/core/server/kibana-plugin-core-server.opsosmetrics.md +++ b/docs/development/core/server/kibana-plugin-core-server.opsosmetrics.md @@ -16,6 +16,8 @@ export interface OpsOsMetrics | Property | Type | Description | | --- | --- | --- | +| [cpu](./kibana-plugin-core-server.opsosmetrics.cpu.md) | {
control_group: string;
cfs_period_micros: number;
cfs_quota_micros: number;
stat: {
number_of_elapsed_periods: number;
number_of_times_throttled: number;
time_throttled_nanos: number;
};
} | cpu cgroup metrics, undefined when not running in a cgroup | +| [cpuacct](./kibana-plugin-core-server.opsosmetrics.cpuacct.md) | {
control_group: string;
usage_nanos: number;
} | cpu accounting metrics, undefined when not running in a cgroup | | [distro](./kibana-plugin-core-server.opsosmetrics.distro.md) | string | The os distrib. Only present for linux platforms | | [distroRelease](./kibana-plugin-core-server.opsosmetrics.distrorelease.md) | string | The os distrib release, prefixed by the os distrib. Only present for linux platforms | | [load](./kibana-plugin-core-server.opsosmetrics.load.md) | {
'1m': number;
'5m': number;
'15m': number;
} | cpu load metrics | diff --git a/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindoptions.filter.md b/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindoptions.filter.md index ae7b7a28bcd09..c98a4fe5e8796 100644 --- a/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindoptions.filter.md +++ b/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindoptions.filter.md @@ -7,5 +7,5 @@ Signature: ```typescript -filter?: string; +filter?: string | KueryNode; ``` diff --git a/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindoptions.md b/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindoptions.md index 15a9d99b3d062..804c83f7c1b48 100644 --- a/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindoptions.md +++ b/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindoptions.md @@ -17,7 +17,7 @@ export interface SavedObjectsFindOptions | --- | --- | --- | | [defaultSearchOperator](./kibana-plugin-core-server.savedobjectsfindoptions.defaultsearchoperator.md) | 'AND' | 'OR' | | | [fields](./kibana-plugin-core-server.savedobjectsfindoptions.fields.md) | string[] | An array of fields to include in the results | -| [filter](./kibana-plugin-core-server.savedobjectsfindoptions.filter.md) | string | | +| [filter](./kibana-plugin-core-server.savedobjectsfindoptions.filter.md) | string | KueryNode | | | [hasReference](./kibana-plugin-core-server.savedobjectsfindoptions.hasreference.md) | {
type: string;
id: string;
} | | | [namespaces](./kibana-plugin-core-server.savedobjectsfindoptions.namespaces.md) | string[] | | | [page](./kibana-plugin-core-server.savedobjectsfindoptions.page.md) | number | | diff --git a/docs/development/core/server/kibana-plugin-core-server.savedobjectsservicesetup.md b/docs/development/core/server/kibana-plugin-core-server.savedobjectsservicesetup.md index 6ef7b991bb159..650459bfdb435 100644 --- a/docs/development/core/server/kibana-plugin-core-server.savedobjectsservicesetup.md +++ b/docs/development/core/server/kibana-plugin-core-server.savedobjectsservicesetup.md @@ -16,8 +16,6 @@ export interface SavedObjectsServiceSetup When plugins access the Saved Objects client, a new client is created using the factory provided to `setClientFactory` and wrapped by all wrappers registered through `addClientWrapper`. -All the setup APIs will throw if called after the service has started, and therefor cannot be used from legacy plugin code. Legacy plugins should use the legacy savedObject service until migrated. - ## Example 1 diff --git a/docs/development/core/server/kibana-plugin-core-server.savedobjectsservicesetup.registertype.md b/docs/development/core/server/kibana-plugin-core-server.savedobjectsservicesetup.registertype.md index 57c9e04966c1b..54e01d3110a2d 100644 --- a/docs/development/core/server/kibana-plugin-core-server.savedobjectsservicesetup.registertype.md +++ b/docs/development/core/server/kibana-plugin-core-server.savedobjectsservicesetup.registertype.md @@ -14,10 +14,6 @@ See the [mappings format](./kibana-plugin-core-server.savedobjectstypemappingdef registerType: (type: SavedObjectsType) => void; ``` -## Remarks - -The type definition is an aggregation of the legacy savedObjects `schema`, `mappings` and `migration` concepts. This API is the single entry point to register saved object types in the new platform. - ## Example diff --git a/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.dependencies_.md b/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.dependencies_.md new file mode 100644 index 0000000000000..7475f0e3a4c1c --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.dependencies_.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [StatusServiceSetup](./kibana-plugin-core-server.statusservicesetup.md) > [dependencies$](./kibana-plugin-core-server.statusservicesetup.dependencies_.md) + +## StatusServiceSetup.dependencies$ property + +Current status for all plugins this plugin depends on. Each key of the `Record` is a plugin id. + +Signature: + +```typescript +dependencies$: Observable>; +``` diff --git a/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.derivedstatus_.md b/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.derivedstatus_.md new file mode 100644 index 0000000000000..6c65e44270a06 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.derivedstatus_.md @@ -0,0 +1,20 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [StatusServiceSetup](./kibana-plugin-core-server.statusservicesetup.md) > [derivedStatus$](./kibana-plugin-core-server.statusservicesetup.derivedstatus_.md) + +## StatusServiceSetup.derivedStatus$ property + +The status of this plugin as derived from its dependencies. 
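Before the generated reference below, a brief editorial sketch (the `logger` and plugin wiring are assumptions, not part of the docs) of consuming `dependencies$` to surface dependencies that are not fully available:

```ts
import { CoreSetup, Logger, ServiceStatusLevels } from 'src/core/server';

export const watchDependencies = (core: CoreSetup, logger: Logger) => {
  core.status.dependencies$.subscribe((statuses) => {
    for (const [pluginId, status] of Object.entries(statuses)) {
      if (status.level !== ServiceStatusLevels.available) {
        // Each key of the emitted Record is a plugin id, as described above.
        logger.warn(`dependency ${pluginId} is ${status.level.toString()}: ${status.summary}`);
      }
    }
  });
};
```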
+ +Signature: + +```typescript +derivedStatus$: Observable&lt;ServiceStatus&gt;; +``` + +## Remarks + +By default, plugins inherit this derived status from their dependencies. Calling `set()` overrides this default status. + +This may emit multiple times for a single status change event as it propagates through the dependency tree. + diff --git a/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.md b/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.md index 3d3b73ccda25f..ba0645be4d26c 100644 --- a/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.md +++ b/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.md @@ -12,10 +12,73 @@ API for accessing status of Core and this plugin's dependencies as well as for c export interface StatusServiceSetup ``` +## Remarks + +By default, a plugin inherits its current status from the most severe status level of any Core services and any plugins that it depends on. This default status is available on the `derivedStatus$` API. + +Plugins may customize their status calculation by calling the `set()` API with an Observable. Within this Observable, a plugin may choose to only depend on the status of some of its dependencies, to ignore severe status levels of particular Core services they are not concerned with, or to make its status dependent on other external services. + +## Example 1 + +Customize a plugin's status to only depend on the status of SavedObjects: + +```ts +core.status.set( + core.status.core$.pipe( + map((coreStatus) => { + return coreStatus.savedObjects; + }) + ) +); + +``` + +## Example 2 + +Customize a plugin's status to include an external service: + +```ts +const externalStatus$ = interval(1000).pipe( + switchMap(async () => { + const resp = await fetch(`https://myexternaldep.com/_healthz`); + const body = await resp.json(); + if (body.ok) { + return { level: ServiceStatusLevels.available, summary: 'External Service is up' }; + } else { + return { level: ServiceStatusLevels.unavailable, summary: 'External Service is unavailable' }; + } + }), + catchError((error) => { + return of({ level: ServiceStatusLevels.unavailable, summary: `External Service is down`, meta: { error }}); + }) +); + +core.status.set( + combineLatest([core.status.derivedStatus$, externalStatus$]).pipe( + map(([derivedStatus, externalStatus]) => { + if (externalStatus.level > derivedStatus.level) { + return externalStatus; + } else { + return derivedStatus; + } + }) + ) +); + +``` + ## Properties | Property | Type | Description | | --- | --- | --- | | [core$](./kibana-plugin-core-server.statusservicesetup.core_.md) | Observable<CoreStatus> | Current status for all Core services. | +| [dependencies$](./kibana-plugin-core-server.statusservicesetup.dependencies_.md) | Observable<Record<string, ServiceStatus>> | Current status for all plugins this plugin depends on. Each key of the Record is a plugin id. | +| [derivedStatus$](./kibana-plugin-core-server.statusservicesetup.derivedstatus_.md) | Observable<ServiceStatus> | The status of this plugin as derived from its dependencies. | | [overall$](./kibana-plugin-core-server.statusservicesetup.overall_.md) | Observable<ServiceStatus> | Overall system status for all of Kibana. | +## Methods + +| Method | Description | +| --- | --- | +| [set(status$)](./kibana-plugin-core-server.statusservicesetup.set.md) | Allows a plugin to specify a custom status dependent on its own criteria. Completely overrides the default inherited status.
| + diff --git a/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.set.md b/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.set.md new file mode 100644 index 0000000000000..143cd397c40ae --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.statusservicesetup.set.md @@ -0,0 +1,28 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [StatusServiceSetup](./kibana-plugin-core-server.statusservicesetup.md) > [set](./kibana-plugin-core-server.statusservicesetup.set.md) + +## StatusServiceSetup.set() method + +Allows a plugin to specify a custom status dependent on its own criteria. Completely overrides the default inherited status. + +Signature: + +```typescript +set(status$: Observable): void; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| status$ | Observable<ServiceStatus> | | + +Returns: + +`void` + +## Remarks + +See the [StatusServiceSetup.derivedStatus$](./kibana-plugin-core-server.statusservicesetup.derivedstatus_.md) API for leveraging the default status calculation that is provided by Core. + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fetchoptions.abortsignal.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fetchoptions.abortsignal.md deleted file mode 100644 index 791f1b63e6539..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fetchoptions.abortsignal.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FetchOptions](./kibana-plugin-plugins-data-public.fetchoptions.md) > [abortSignal](./kibana-plugin-plugins-data-public.fetchoptions.abortsignal.md) - -## FetchOptions.abortSignal property - -Signature: - -```typescript -abortSignal?: AbortSignal; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fetchoptions.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fetchoptions.md deleted file mode 100644 index f07fdd4280533..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fetchoptions.md +++ /dev/null @@ -1,19 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FetchOptions](./kibana-plugin-plugins-data-public.fetchoptions.md) - -## FetchOptions interface - -Signature: - -```typescript -export interface FetchOptions -``` - -## Properties - -| Property | Type | Description | -| --- | --- | --- | -| [abortSignal](./kibana-plugin-plugins-data-public.fetchoptions.abortsignal.md) | AbortSignal | | -| [searchStrategyId](./kibana-plugin-plugins-data-public.fetchoptions.searchstrategyid.md) | string | | - diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fetchoptions.searchstrategyid.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fetchoptions.searchstrategyid.md deleted file mode 100644 index 8824529eb4eca..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fetchoptions.searchstrategyid.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FetchOptions](./kibana-plugin-plugins-data-public.fetchoptions.md) > [searchStrategyId](./kibana-plugin-plugins-data-public.fetchoptions.searchstrategyid.md) - -## 
FetchOptions.searchStrategyId property - -Signature: - -```typescript -searchStrategyId?: string; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist._constructor_.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist._constructor_.md deleted file mode 100644 index 9f9613a5a68f7..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist._constructor_.md +++ /dev/null @@ -1,23 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) > [(constructor)](./kibana-plugin-plugins-data-public.fieldlist._constructor_.md) - -## FieldList.(constructor) - -Constructs a new instance of the `FieldList` class - -Signature: - -```typescript -constructor(indexPattern: IndexPattern, specs?: FieldSpec[], shortDotsEnable?: boolean, onNotification?: OnNotification); -``` - -## Parameters - -| Parameter | Type | Description | -| --- | --- | --- | -| indexPattern | IndexPattern | | -| specs | FieldSpec[] | | -| shortDotsEnable | boolean | | -| onNotification | OnNotification | | - diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.add.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.add.md deleted file mode 100644 index ae3d82f0cc3ea..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.add.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) > [add](./kibana-plugin-plugins-data-public.fieldlist.add.md) - -## FieldList.add property - -Signature: - -```typescript -readonly add: (field: FieldSpec) => void; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.getall.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.getall.md deleted file mode 100644 index da29a4de9acc8..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.getall.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) > [getAll](./kibana-plugin-plugins-data-public.fieldlist.getall.md) - -## FieldList.getAll property - -Signature: - -```typescript -readonly getAll: () => IndexPatternField[]; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.getbyname.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.getbyname.md deleted file mode 100644 index af368d003423a..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.getbyname.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) > [getByName](./kibana-plugin-plugins-data-public.fieldlist.getbyname.md) - -## FieldList.getByName property - -Signature: - -```typescript -readonly getByName: (name: IndexPatternField['name']) => IndexPatternField | undefined; -``` diff --git 
a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.getbytype.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.getbytype.md deleted file mode 100644 index 16bae3ee7c555..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.getbytype.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) > [getByType](./kibana-plugin-plugins-data-public.fieldlist.getbytype.md) - -## FieldList.getByType property - -Signature: - -```typescript -readonly getByType: (type: IndexPatternField['type']) => any[]; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.md index 012b069430290..79bcaf9700cf0 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.md @@ -1,32 +1,11 @@ -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [fieldList](./kibana-plugin-plugins-data-public.fieldlist.md) -## FieldList class +## fieldList variable Signature: ```typescript -export declare class FieldList extends Array implements IIndexPatternFieldList +fieldList: (specs?: FieldSpec[], shortDotsEnable?: boolean) => IIndexPatternFieldList ``` - -## Constructors - -| Constructor | Modifiers | Description | -| --- | --- | --- | -| [(constructor)(indexPattern, specs, shortDotsEnable, onNotification)](./kibana-plugin-plugins-data-public.fieldlist._constructor_.md) | | Constructs a new instance of the FieldList class | - -## Properties - -| Property | Modifiers | Type | Description | -| --- | --- | --- | --- | -| [add](./kibana-plugin-plugins-data-public.fieldlist.add.md) | | (field: FieldSpec) => void | | -| [getAll](./kibana-plugin-plugins-data-public.fieldlist.getall.md) | | () => IndexPatternField[] | | -| [getByName](./kibana-plugin-plugins-data-public.fieldlist.getbyname.md) | | (name: IndexPatternField['name']) => IndexPatternField | undefined | | -| [getByType](./kibana-plugin-plugins-data-public.fieldlist.getbytype.md) | | (type: IndexPatternField['type']) => any[] | | -| [remove](./kibana-plugin-plugins-data-public.fieldlist.remove.md) | | (field: IFieldType) => void | | -| [removeAll](./kibana-plugin-plugins-data-public.fieldlist.removeall.md) | | () => void | | -| [replaceAll](./kibana-plugin-plugins-data-public.fieldlist.replaceall.md) | | (specs: FieldSpec[]) => void | | -| [toSpec](./kibana-plugin-plugins-data-public.fieldlist.tospec.md) | | () => {
count: number;
script: string | undefined;
lang: string | undefined;
conflictDescriptions: Record<string, string[]> | undefined;
name: string;
type: string;
esTypes: string[] | undefined;
scripted: boolean;
searchable: boolean;
aggregatable: boolean;
readFromDocValues: boolean;
subType: import("../types").IFieldSubType | undefined;
format: any;
}[] | | -| [update](./kibana-plugin-plugins-data-public.fieldlist.update.md) | | (field: FieldSpec) => void | | - diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.remove.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.remove.md deleted file mode 100644 index 149410adb3550..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.remove.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) > [remove](./kibana-plugin-plugins-data-public.fieldlist.remove.md) - -## FieldList.remove property - -Signature: - -```typescript -readonly remove: (field: IFieldType) => void; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.removeall.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.removeall.md deleted file mode 100644 index 92a45349ad005..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.removeall.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) > [removeAll](./kibana-plugin-plugins-data-public.fieldlist.removeall.md) - -## FieldList.removeAll property - -Signature: - -```typescript -readonly removeAll: () => void; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.replaceall.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.replaceall.md deleted file mode 100644 index 5330440e6b96a..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.replaceall.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) > [replaceAll](./kibana-plugin-plugins-data-public.fieldlist.replaceall.md) - -## FieldList.replaceAll property - -Signature: - -```typescript -readonly replaceAll: (specs: FieldSpec[]) => void; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.tospec.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.tospec.md deleted file mode 100644 index e646339feb495..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.tospec.md +++ /dev/null @@ -1,25 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) > [toSpec](./kibana-plugin-plugins-data-public.fieldlist.tospec.md) - -## FieldList.toSpec property - -Signature: - -```typescript -readonly toSpec: () => { - count: number; - script: string | undefined; - lang: string | undefined; - conflictDescriptions: Record | undefined; - name: string; - type: string; - esTypes: string[] | undefined; - scripted: boolean; - searchable: boolean; - aggregatable: boolean; - readFromDocValues: boolean; - subType: import("../types").IFieldSubType | undefined; - format: any; - }[]; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.update.md 
b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.update.md deleted file mode 100644 index c718e47b31b50..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.fieldlist.update.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) > [update](./kibana-plugin-plugins-data-public.fieldlist.update.md) - -## FieldList.update property - -Signature: - -```typescript -readonly update: (field: FieldSpec) => void; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.getsearchparamsfromrequest.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.getsearchparamsfromrequest.md index 337b4b3302cc3..d32e9a955f890 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.getsearchparamsfromrequest.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.getsearchparamsfromrequest.md @@ -9,7 +9,6 @@ ```typescript export declare function getSearchParamsFromRequest(searchRequest: SearchRequest, dependencies: { - esShardTimeout: number; getConfig: GetConfigFn; }): ISearchRequestParams; ``` @@ -19,7 +18,7 @@ export declare function getSearchParamsFromRequest(searchRequest: SearchRequest, | Parameter | Type | Description | | --- | --- | --- | | searchRequest | SearchRequest | | -| dependencies | {
esShardTimeout: number;
getConfig: GetConfigFn;
} | | +| dependencies | {
getConfig: GetConfigFn;
} | | Returns: diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ifieldtype.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ifieldtype.md index 6f42fb32fdb7b..3ff2afafcc514 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ifieldtype.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ifieldtype.md @@ -28,7 +28,7 @@ export interface IFieldType | [searchable](./kibana-plugin-plugins-data-public.ifieldtype.searchable.md) | boolean | | | [sortable](./kibana-plugin-plugins-data-public.ifieldtype.sortable.md) | boolean | | | [subType](./kibana-plugin-plugins-data-public.ifieldtype.subtype.md) | IFieldSubType | | -| [toSpec](./kibana-plugin-plugins-data-public.ifieldtype.tospec.md) | () => FieldSpec | | +| [toSpec](./kibana-plugin-plugins-data-public.ifieldtype.tospec.md) | (options?: {
getFormatterForField?: IndexPattern['getFormatterForField'];
}) => FieldSpec | | | [type](./kibana-plugin-plugins-data-public.ifieldtype.type.md) | string | | | [visualizable](./kibana-plugin-plugins-data-public.ifieldtype.visualizable.md) | boolean | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ifieldtype.tospec.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ifieldtype.tospec.md index 1fb4084c25d34..52238ea2a00ca 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ifieldtype.tospec.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ifieldtype.tospec.md @@ -7,5 +7,7 @@ Signature: ```typescript -toSpec?: () => FieldSpec; +toSpec?: (options?: { + getFormatterForField?: IndexPattern['getFormatterForField']; + }) => FieldSpec; ``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.iindexpatternfieldlist.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.iindexpatternfieldlist.md index b068c4804c0dd..b1e13ffaabd07 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.iindexpatternfieldlist.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.iindexpatternfieldlist.md @@ -21,5 +21,6 @@ export interface IIndexPatternFieldList extends Array | [remove(field)](./kibana-plugin-plugins-data-public.iindexpatternfieldlist.remove.md) | | | [removeAll()](./kibana-plugin-plugins-data-public.iindexpatternfieldlist.removeall.md) | | | [replaceAll(specs)](./kibana-plugin-plugins-data-public.iindexpatternfieldlist.replaceall.md) | | +| [toSpec(options)](./kibana-plugin-plugins-data-public.iindexpatternfieldlist.tospec.md) | | | [update(field)](./kibana-plugin-plugins-data-public.iindexpatternfieldlist.update.md) | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.iindexpatternfieldlist.tospec.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.iindexpatternfieldlist.tospec.md new file mode 100644 index 0000000000000..fd20f2944c5be --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.iindexpatternfieldlist.tospec.md @@ -0,0 +1,24 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [IIndexPatternFieldList](./kibana-plugin-plugins-data-public.iindexpatternfieldlist.md) > [toSpec](./kibana-plugin-plugins-data-public.iindexpatternfieldlist.tospec.md) + +## IIndexPatternFieldList.toSpec() method + +Signature: + +```typescript +toSpec(options?: { + getFormatterForField?: IndexPattern['getFormatterForField']; + }): FieldSpec[]; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| options | {
getFormatterForField?: IndexPattern['getFormatterForField'];
} | | + +Returns: + +`FieldSpec[]` + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern._constructor_.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern._constructor_.md index 2e078e3404fe6..a5bb15c963978 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern._constructor_.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern._constructor_.md @@ -9,7 +9,7 @@ Constructs a new instance of the `IndexPattern` class Signature: ```typescript -constructor(id: string | undefined, { savedObjectsClient, apiClient, patternCache, fieldFormats, onNotification, onError, shortDotsEnable, metaFields, }: IndexPatternDeps); +constructor(id: string | undefined, { savedObjectsClient, apiClient, patternCache, fieldFormats, indexPatternsService, onNotification, onError, shortDotsEnable, metaFields, }: IndexPatternDeps); ``` ## Parameters @@ -17,5 +17,5 @@ constructor(id: string | undefined, { savedObjectsClient, apiClient, patternCach | Parameter | Type | Description | | --- | --- | --- | | id | string | undefined | | -| { savedObjectsClient, apiClient, patternCache, fieldFormats, onNotification, onError, shortDotsEnable, metaFields, } | IndexPatternDeps | | +| { savedObjectsClient, apiClient, patternCache, fieldFormats, indexPatternsService, onNotification, onError, shortDotsEnable, metaFields, } | IndexPatternDeps | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.md index 37db063e284ec..87ce1e258712a 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.md @@ -14,7 +14,7 @@ export declare class IndexPattern implements IIndexPattern | Constructor | Modifiers | Description | | --- | --- | --- | -| [(constructor)(id, { savedObjectsClient, apiClient, patternCache, fieldFormats, onNotification, onError, shortDotsEnable, metaFields, })](./kibana-plugin-plugins-data-public.indexpattern._constructor_.md) | | Constructs a new instance of the IndexPattern class | +| [(constructor)(id, { savedObjectsClient, apiClient, patternCache, fieldFormats, indexPatternsService, onNotification, onError, shortDotsEnable, metaFields, })](./kibana-plugin-plugins-data-public.indexpattern._constructor_.md) | | Constructs a new instance of the IndexPattern class | ## Properties @@ -29,11 +29,13 @@ export declare class IndexPattern implements IIndexPattern | [id](./kibana-plugin-plugins-data-public.indexpattern.id.md) | | string | | | [intervalName](./kibana-plugin-plugins-data-public.indexpattern.intervalname.md) | | string | undefined | | | [metaFields](./kibana-plugin-plugins-data-public.indexpattern.metafields.md) | | string[] | | +| [originalBody](./kibana-plugin-plugins-data-public.indexpattern.originalbody.md) | | {
[key: string]: any;
} | | | [sourceFilters](./kibana-plugin-plugins-data-public.indexpattern.sourcefilters.md) | | SourceFilter[] | | | [timeFieldName](./kibana-plugin-plugins-data-public.indexpattern.timefieldname.md) | | string | undefined | | | [title](./kibana-plugin-plugins-data-public.indexpattern.title.md) | | string | | | [type](./kibana-plugin-plugins-data-public.indexpattern.type.md) | | string | undefined | | | [typeMeta](./kibana-plugin-plugins-data-public.indexpattern.typemeta.md) | | TypeMeta | | +| [version](./kibana-plugin-plugins-data-public.indexpattern.version.md) | | string | undefined | | ## Methods @@ -60,8 +62,5 @@ export declare class IndexPattern implements IIndexPattern | [prepBody()](./kibana-plugin-plugins-data-public.indexpattern.prepbody.md) | | | | [refreshFields()](./kibana-plugin-plugins-data-public.indexpattern.refreshfields.md) | | | | [removeScriptedField(fieldName)](./kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md) | | | -| [save(saveAttempts)](./kibana-plugin-plugins-data-public.indexpattern.save.md) | | | -| [toJSON()](./kibana-plugin-plugins-data-public.indexpattern.tojson.md) | | | | [toSpec()](./kibana-plugin-plugins-data-public.indexpattern.tospec.md) | | | -| [toString()](./kibana-plugin-plugins-data-public.indexpattern.tostring.md) | | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.originalbody.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.originalbody.md new file mode 100644 index 0000000000000..4bc3c76afbae9 --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.originalbody.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [IndexPattern](./kibana-plugin-plugins-data-public.indexpattern.md) > [originalBody](./kibana-plugin-plugins-data-public.indexpattern.originalbody.md) + +## IndexPattern.originalBody property + +Signature: + +```typescript +originalBody: { + [key: string]: any; + }; +``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md index 42c6dd72b8c4e..e902d9c42b082 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md @@ -7,7 +7,7 @@ Signature: ```typescript -removeScriptedField(fieldName: string): Promise; +removeScriptedField(fieldName: string): void; ``` ## Parameters @@ -18,5 +18,5 @@ removeScriptedField(fieldName: string): Promise; Returns: -`Promise` +`void` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.save.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.save.md deleted file mode 100644 index d0b471cc2bc21..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.save.md +++ /dev/null @@ -1,22 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [IndexPattern](./kibana-plugin-plugins-data-public.indexpattern.md) > [save](./kibana-plugin-plugins-data-public.indexpattern.save.md) - -## IndexPattern.save() method - -Signature: - -```typescript -save(saveAttempts?: 
number): Promise; -``` - -## Parameters - -| Parameter | Type | Description | -| --- | --- | --- | -| saveAttempts | number | | - -Returns: - -`Promise` - diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.tojson.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.tojson.md deleted file mode 100644 index 0ae04bb424d44..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.tojson.md +++ /dev/null @@ -1,15 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [IndexPattern](./kibana-plugin-plugins-data-public.indexpattern.md) > [toJSON](./kibana-plugin-plugins-data-public.indexpattern.tojson.md) - -## IndexPattern.toJSON() method - -Signature: - -```typescript -toJSON(): string | undefined; -``` -Returns: - -`string | undefined` - diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.tostring.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.tostring.md deleted file mode 100644 index a10b549a7b9eb..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.tostring.md +++ /dev/null @@ -1,15 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [IndexPattern](./kibana-plugin-plugins-data-public.indexpattern.md) > [toString](./kibana-plugin-plugins-data-public.indexpattern.tostring.md) - -## IndexPattern.toString() method - -Signature: - -```typescript -toString(): string; -``` -Returns: - -`string` - diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.version.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.version.md new file mode 100644 index 0000000000000..99d3bc4e7a04d --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.version.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [IndexPattern](./kibana-plugin-plugins-data-public.indexpattern.md) > [version](./kibana-plugin-plugins-data-public.indexpattern.version.md) + +## IndexPattern.version property + +Signature: + +```typescript +version: string | undefined; +``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield._constructor_.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield._constructor_.md index 10b65bdccdf87..5d467a7a9cbce 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield._constructor_.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield._constructor_.md @@ -9,15 +9,13 @@ Constructs a new instance of the `IndexPatternField` class Signature: ```typescript -constructor(indexPattern: IndexPattern, spec: FieldSpec, displayName: string, onNotification: OnNotification); +constructor(spec: FieldSpec, displayName: string); ``` ## Parameters | Parameter | Type | Description | | --- | --- | --- | -| indexPattern | IndexPattern | | | spec | FieldSpec | | | displayName | string | | -| onNotification | OnNotification | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.format.md 
b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.format.md deleted file mode 100644 index f28d5b1bca7e5..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.format.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [IndexPatternField](./kibana-plugin-plugins-data-public.indexpatternfield.md) > [format](./kibana-plugin-plugins-data-public.indexpatternfield.format.md) - -## IndexPatternField.format property - -Signature: - -```typescript -get format(): FieldFormat; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.indexpattern.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.indexpattern.md deleted file mode 100644 index 3d145cce9d07d..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.indexpattern.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [IndexPatternField](./kibana-plugin-plugins-data-public.indexpatternfield.md) > [indexPattern](./kibana-plugin-plugins-data-public.indexpatternfield.indexpattern.md) - -## IndexPatternField.indexPattern property - -Signature: - -```typescript -readonly indexPattern: IndexPattern; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.md index 713b29ea3a3d3..215188ffa2607 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.md @@ -14,7 +14,7 @@ export declare class IndexPatternField implements IFieldType | Constructor | Modifiers | Description | | --- | --- | --- | -| [(constructor)(indexPattern, spec, displayName, onNotification)](./kibana-plugin-plugins-data-public.indexpatternfield._constructor_.md) | | Constructs a new instance of the IndexPatternField class | +| [(constructor)(spec, displayName)](./kibana-plugin-plugins-data-public.indexpatternfield._constructor_.md) | | Constructs a new instance of the IndexPatternField class | ## Properties @@ -26,8 +26,6 @@ export declare class IndexPatternField implements IFieldType | [displayName](./kibana-plugin-plugins-data-public.indexpatternfield.displayname.md) | | string | | | [esTypes](./kibana-plugin-plugins-data-public.indexpatternfield.estypes.md) | | string[] | undefined | | | [filterable](./kibana-plugin-plugins-data-public.indexpatternfield.filterable.md) | | boolean | | -| [format](./kibana-plugin-plugins-data-public.indexpatternfield.format.md) | | FieldFormat | | -| [indexPattern](./kibana-plugin-plugins-data-public.indexpatternfield.indexpattern.md) | | IndexPattern | | | [lang](./kibana-plugin-plugins-data-public.indexpatternfield.lang.md) | | string | undefined | | | [name](./kibana-plugin-plugins-data-public.indexpatternfield.name.md) | | string | | | [readFromDocValues](./kibana-plugin-plugins-data-public.indexpatternfield.readfromdocvalues.md) | | boolean | | @@ -45,5 +43,5 @@ export declare class IndexPatternField implements IFieldType | Method | Modifiers | Description | | --- | --- | --- | | [toJSON()](./kibana-plugin-plugins-data-public.indexpatternfield.tojson.md) | 
| | -| [toSpec()](./kibana-plugin-plugins-data-public.indexpatternfield.tospec.md) | | | +| [toSpec({ getFormatterForField, })](./kibana-plugin-plugins-data-public.indexpatternfield.tospec.md) | | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.tospec.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.tospec.md index 5037cb0049e82..1d80c90991f55 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.tospec.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternfield.tospec.md @@ -7,7 +7,9 @@ Signature: ```typescript -toSpec(): { +toSpec({ getFormatterForField, }?: { + getFormatterForField?: IndexPattern['getFormatterForField']; + }): { count: number; script: string | undefined; lang: string | undefined; @@ -20,9 +22,19 @@ toSpec(): { aggregatable: boolean; readFromDocValues: boolean; subType: import("../types").IFieldSubType | undefined; - format: any; + format: { + id: any; + params: any; + } | undefined; }; ``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| { getFormatterForField, } | {
getFormatterForField?: IndexPattern['getFormatterForField'];
} | | + Returns: `{ @@ -38,6 +50,9 @@ toSpec(): { aggregatable: boolean; readFromDocValues: boolean; subType: import("../types").IFieldSubType | undefined; - format: any; + format: { + id: any; + params: any; + } | undefined; }` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.abortsignal.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.abortsignal.md new file mode 100644 index 0000000000000..fd8d322d54b26 --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.abortsignal.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [ISearchOptions](./kibana-plugin-plugins-data-public.isearchoptions.md) > [abortSignal](./kibana-plugin-plugins-data-public.isearchoptions.abortsignal.md) + +## ISearchOptions.abortSignal property + +An `AbortSignal` that allows the caller of `search` to abort a search request. + +Signature: + +```typescript +abortSignal?: AbortSignal; +``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.md index 3eb38dc7d52e0..c9018b0048aa3 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.md @@ -14,6 +14,6 @@ export interface ISearchOptions | Property | Type | Description | | --- | --- | --- | -| [signal](./kibana-plugin-plugins-data-public.isearchoptions.signal.md) | AbortSignal | | -| [strategy](./kibana-plugin-plugins-data-public.isearchoptions.strategy.md) | string | | +| [abortSignal](./kibana-plugin-plugins-data-public.isearchoptions.abortsignal.md) | AbortSignal | An AbortSignal that allows the caller of search to abort a search request. | +| [strategy](./kibana-plugin-plugins-data-public.isearchoptions.strategy.md) | string | Use this option to force using a specific server side search strategy. Leave empty to use the default strategy. | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.signal.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.signal.md deleted file mode 100644 index 10bd186d55baa..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.signal.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [ISearchOptions](./kibana-plugin-plugins-data-public.isearchoptions.md) > [signal](./kibana-plugin-plugins-data-public.isearchoptions.signal.md) - -## ISearchOptions.signal property - -Signature: - -```typescript -signal?: AbortSignal; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.strategy.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.strategy.md index df7e050691a8f..bd2580957f6c1 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.strategy.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.strategy.md @@ -4,6 +4,8 @@ ## ISearchOptions.strategy property +Use this option to force using a specific server side search strategy. Leave empty to use the default strategy. 
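Editorial aside, not part of this diff: a minimal TypeScript sketch of how a consumer might pass the `abortSignal` and `strategy` options documented above. It assumes `data` is the data plugin's client-side start contract, that `search` returns an observable response, and that the index name and query are placeholders; the `'es'` strategy id corresponds to the `ES_SEARCH_STRATEGY` constant added elsewhere in this change.

```ts
// Sketch only: `data` (the data plugin start contract) and the index name are assumptions.
const abortController = new AbortController();

const search$ = data.search.search(
  { params: { index: 'my-index-*', body: { query: { match_all: {} } } } },
  {
    // Abort the in-flight search when abortController.abort() is called.
    abortSignal: abortController.signal,
    // Force the Elasticsearch search strategy; omit to use the default strategy.
    strategy: 'es',
  }
);

const subscription = search$.subscribe((response) => {
  console.log(response);
});

// Later, e.g. when the user navigates away:
abortController.abort();
subscription.unsubscribe();
```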
+ Signature: ```typescript diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md index 09702df4fdb54..0c493ca492953 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md @@ -10,7 +10,6 @@ | --- | --- | | [AggParamType](./kibana-plugin-plugins-data-public.aggparamtype.md) | | | [FieldFormat](./kibana-plugin-plugins-data-public.fieldformat.md) | | -| [FieldList](./kibana-plugin-plugins-data-public.fieldlist.md) | | | [FilterManager](./kibana-plugin-plugins-data-public.filtermanager.md) | | | [IndexPattern](./kibana-plugin-plugins-data-public.indexpattern.md) | | | [IndexPatternField](./kibana-plugin-plugins-data-public.indexpatternfield.md) | | @@ -51,7 +50,6 @@ | [DataPublicPluginSetup](./kibana-plugin-plugins-data-public.datapublicpluginsetup.md) | | | [DataPublicPluginStart](./kibana-plugin-plugins-data-public.datapublicpluginstart.md) | | | [EsQueryConfig](./kibana-plugin-plugins-data-public.esqueryconfig.md) | | -| [FetchOptions](./kibana-plugin-plugins-data-public.fetchoptions.md) | | | [FieldFormatConfig](./kibana-plugin-plugins-data-public.fieldformatconfig.md) | | | [FieldMappingSpec](./kibana-plugin-plugins-data-public.fieldmappingspec.md) | | | [Filter](./kibana-plugin-plugins-data-public.filter.md) | | @@ -71,6 +69,7 @@ | [OptionedValueProp](./kibana-plugin-plugins-data-public.optionedvalueprop.md) | | | [Query](./kibana-plugin-plugins-data-public.query.md) | | | [QueryState](./kibana-plugin-plugins-data-public.querystate.md) | All query state service state | +| [QueryStateChange](./kibana-plugin-plugins-data-public.querystatechange.md) | | | [QuerySuggestionBasic](./kibana-plugin-plugins-data-public.querysuggestionbasic.md) | \* | | [QuerySuggestionField](./kibana-plugin-plugins-data-public.querysuggestionfield.md) | \* | | [QuerySuggestionGetFnArgs](./kibana-plugin-plugins-data-public.querysuggestiongetfnargs.md) | \* | @@ -103,6 +102,7 @@ | [expandShorthand](./kibana-plugin-plugins-data-public.expandshorthand.md) | | | [extractSearchSourceReferences](./kibana-plugin-plugins-data-public.extractsearchsourcereferences.md) | | | [fieldFormats](./kibana-plugin-plugins-data-public.fieldformats.md) | | +| [fieldList](./kibana-plugin-plugins-data-public.fieldlist.md) | | | [FilterBar](./kibana-plugin-plugins-data-public.filterbar.md) | | | [getKbnTypeNames](./kibana-plugin-plugins-data-public.getkbntypenames.md) | Get the esTypes known by all kbnFieldTypes {Array} | | [indexPatterns](./kibana-plugin-plugins-data-public.indexpatterns.md) | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystatechange.appfilters.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystatechange.appfilters.md new file mode 100644 index 0000000000000..b358e9477e515 --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystatechange.appfilters.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [QueryStateChange](./kibana-plugin-plugins-data-public.querystatechange.md) > [appFilters](./kibana-plugin-plugins-data-public.querystatechange.appfilters.md) + +## QueryStateChange.appFilters property + +Signature: + +```typescript +appFilters?: boolean; +``` diff --git 
a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystatechange.globalfilters.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystatechange.globalfilters.md new file mode 100644 index 0000000000000..c395f169c35a5 --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystatechange.globalfilters.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [QueryStateChange](./kibana-plugin-plugins-data-public.querystatechange.md) > [globalFilters](./kibana-plugin-plugins-data-public.querystatechange.globalfilters.md) + +## QueryStateChange.globalFilters property + +Signature: + +```typescript +globalFilters?: boolean; +``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystatechange.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystatechange.md new file mode 100644 index 0000000000000..71fb211da11d2 --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystatechange.md @@ -0,0 +1,19 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [QueryStateChange](./kibana-plugin-plugins-data-public.querystatechange.md) + +## QueryStateChange interface + +Signature: + +```typescript +export interface QueryStateChange extends QueryStateChangePartial +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [appFilters](./kibana-plugin-plugins-data-public.querystatechange.appfilters.md) | boolean | | +| [globalFilters](./kibana-plugin-plugins-data-public.querystatechange.globalfilters.md) | boolean | | + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystringinput.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystringinput.md index e139b326b7500..3dbfd9430e913 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystringinput.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.querystringinput.md @@ -7,5 +7,5 @@ Signature: ```typescript -QueryStringInput: React.FC> +QueryStringInput: React.FC> ``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchbar.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchbar.md index 498691c06285d..d1d20291a6799 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchbar.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchbar.md @@ -7,7 +7,7 @@ Signature: ```typescript -SearchBar: React.ComponentClass, "query" | "isLoading" | "filters" | "onRefresh" | "onRefreshChange" | "refreshInterval" | "indexPatterns" | "dataTestSubj" | "customSubmitButton" | "screenTitle" | "showQueryBar" | "showQueryInput" | "showFilterBar" | "showDatePicker" | "showAutoRefreshOnly" | "isRefreshPaused" | "dateRangeFrom" | "dateRangeTo" | "showSaveQuery" | "savedQuery" | "onQueryChange" | "onQuerySubmit" | "onSaved" | "onSavedQueryUpdated" | "onClearSavedQuery" | "indicateNoData" | "timeHistory" | "onFiltersUpdated">, any> & { - WrappedComponent: React.ComponentType & ReactIntl.InjectedIntlProps>; +SearchBar: React.ComponentClass, "query" | "isLoading" | "filters" | "onRefresh" | "onRefreshChange" | "refreshInterval" | "indexPatterns" | "dataTestSubj" | "timeHistory" | 
"customSubmitButton" | "screenTitle" | "showQueryBar" | "showQueryInput" | "showFilterBar" | "showDatePicker" | "showAutoRefreshOnly" | "isRefreshPaused" | "dateRangeFrom" | "dateRangeTo" | "showSaveQuery" | "savedQuery" | "onQueryChange" | "onQuerySubmit" | "onSaved" | "onSavedQueryUpdated" | "onClearSavedQuery" | "indicateNoData" | "onFiltersUpdated">, any> & { + WrappedComponent: React.ComponentType & ReactIntl.InjectedIntlProps>; } ``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor._constructor_.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor._constructor_.md index 6f5dd1076fb40..4c67639300883 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor._constructor_.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor._constructor_.md @@ -4,12 +4,12 @@ ## SearchInterceptor.(constructor) -This class should be instantiated with a `requestTimeout` corresponding with how many ms after requests are initiated that they should automatically cancel. +Constructs a new instance of the `SearchInterceptor` class Signature: ```typescript -constructor(deps: SearchInterceptorDeps, requestTimeout?: number | undefined); +constructor(deps: SearchInterceptorDeps); ``` ## Parameters @@ -17,5 +17,4 @@ constructor(deps: SearchInterceptorDeps, requestTimeout?: number | undefined); | Parameter | Type | Description | | --- | --- | --- | | deps | SearchInterceptorDeps | | -| requestTimeout | number | undefined | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.md index 32954927504ae..fd9f23a7f0052 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.md @@ -14,21 +14,18 @@ export declare class SearchInterceptor | Constructor | Modifiers | Description | | --- | --- | --- | -| [(constructor)(deps, requestTimeout)](./kibana-plugin-plugins-data-public.searchinterceptor._constructor_.md) | | This class should be instantiated with a requestTimeout corresponding with how many ms after requests are initiated that they should automatically cancel. | +| [(constructor)(deps)](./kibana-plugin-plugins-data-public.searchinterceptor._constructor_.md) | | Constructs a new instance of the SearchInterceptor class | ## Properties | Property | Modifiers | Type | Description | | --- | --- | --- | --- | | [deps](./kibana-plugin-plugins-data-public.searchinterceptor.deps.md) | | SearchInterceptorDeps | | -| [requestTimeout](./kibana-plugin-plugins-data-public.searchinterceptor.requesttimeout.md) | | number | undefined | | ## Methods | Method | Modifiers | Description | | --- | --- | --- | | [getPendingCount$()](./kibana-plugin-plugins-data-public.searchinterceptor.getpendingcount_.md) | | Returns an Observable over the current number of pending searches. This could mean that one of the search requests is still in flight, or that it has only received partial responses. | -| [runSearch(request, signal, strategy)](./kibana-plugin-plugins-data-public.searchinterceptor.runsearch.md) | | | | [search(request, options)](./kibana-plugin-plugins-data-public.searchinterceptor.search.md) | | Searches using the given search method. 
Overrides the AbortSignal with one that will abort either when cancelPending is called, when the request times out, or when the original AbortSignal is aborted. Updates pendingCount$ when the request is started/finalized. | -| [setupTimers(options)](./kibana-plugin-plugins-data-public.searchinterceptor.setuptimers.md) | | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.requesttimeout.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.requesttimeout.md deleted file mode 100644 index 3123433762991..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.requesttimeout.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [SearchInterceptor](./kibana-plugin-plugins-data-public.searchinterceptor.md) > [requestTimeout](./kibana-plugin-plugins-data-public.searchinterceptor.requesttimeout.md) - -## SearchInterceptor.requestTimeout property - -Signature: - -```typescript -protected readonly requestTimeout?: number | undefined; -``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.runsearch.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.runsearch.md deleted file mode 100644 index ad1d1dcb59d7b..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.runsearch.md +++ /dev/null @@ -1,24 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [SearchInterceptor](./kibana-plugin-plugins-data-public.searchinterceptor.md) > [runSearch](./kibana-plugin-plugins-data-public.searchinterceptor.runsearch.md) - -## SearchInterceptor.runSearch() method - -Signature: - -```typescript -protected runSearch(request: IEsSearchRequest, signal: AbortSignal, strategy?: string): Observable; -``` - -## Parameters - -| Parameter | Type | Description | -| --- | --- | --- | -| request | IEsSearchRequest | | -| signal | AbortSignal | | -| strategy | string | | - -Returns: - -`Observable` - diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.setuptimers.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.setuptimers.md deleted file mode 100644 index fe35655258b4c..0000000000000 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.setuptimers.md +++ /dev/null @@ -1,28 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [SearchInterceptor](./kibana-plugin-plugins-data-public.searchinterceptor.md) > [setupTimers](./kibana-plugin-plugins-data-public.searchinterceptor.setuptimers.md) - -## SearchInterceptor.setupTimers() method - -Signature: - -```typescript -protected setupTimers(options?: ISearchOptions): { - combinedSignal: AbortSignal; - cleanup: () => void; - }; -``` - -## Parameters - -| Parameter | Type | Description | -| --- | --- | --- | -| options | ISearchOptions | | - -Returns: - -`{ - combinedSignal: AbortSignal; - cleanup: () => void; - }` - diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ui_settings.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ui_settings.md index e515c3513df6c..6ed20beb396f1 100644 --- 
a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ui_settings.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.ui_settings.md @@ -20,6 +20,7 @@ UI_SETTINGS: { readonly COURIER_MAX_CONCURRENT_SHARD_REQUESTS: "courier:maxConcurrentShardRequests"; readonly COURIER_BATCH_SEARCHES: "courier:batchSearches"; readonly SEARCH_INCLUDE_FROZEN: "search:includeFrozen"; + readonly SEARCH_TIMEOUT: "search:timeout"; readonly HISTOGRAM_BAR_TARGET: "histogram:barTarget"; readonly HISTOGRAM_MAX_BARS: "histogram:maxBars"; readonly HISTORY_LIMIT: "history:limit"; diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.es_search_strategy.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.es_search_strategy.md new file mode 100644 index 0000000000000..8fac5cf4d7a9e --- /dev/null +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.es_search_strategy.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [ES\_SEARCH\_STRATEGY](./kibana-plugin-plugins-data-server.es_search_strategy.md) + +## ES\_SEARCH\_STRATEGY variable + +Signature: + +```typescript +ES_SEARCH_STRATEGY = "es" +``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.getdefaultsearchparams.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.getdefaultsearchparams.md index 9de005c1fd0dd..e718ca42ca30f 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.getdefaultsearchparams.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.getdefaultsearchparams.md @@ -7,24 +7,26 @@ Signature: ```typescript -export declare function getDefaultSearchParams(config: SharedGlobalConfig): { - timeout: string; +export declare function getDefaultSearchParams(uiSettingsClient: IUiSettingsClient): Promise<{ + maxConcurrentShardRequests: number | undefined; + ignoreThrottled: boolean; ignoreUnavailable: boolean; - restTotalHitsAsInt: boolean; -}; + trackTotalHits: boolean; +}>; ``` ## Parameters | Parameter | Type | Description | | --- | --- | --- | -| config | SharedGlobalConfig | | +| uiSettingsClient | IUiSettingsClient | | Returns: -`{ - timeout: string; +`Promise<{ + maxConcurrentShardRequests: number | undefined; + ignoreThrottled: boolean; ignoreUnavailable: boolean; - restTotalHitsAsInt: boolean; -}` + trackTotalHits: boolean; +}>` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.getshardtimeout.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.getshardtimeout.md new file mode 100644 index 0000000000000..d7e2a597ff33d --- /dev/null +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.getshardtimeout.md @@ -0,0 +1,30 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [getShardTimeout](./kibana-plugin-plugins-data-server.getshardtimeout.md) + +## getShardTimeout() function + +Signature: + +```typescript +export declare function getShardTimeout(config: SharedGlobalConfig): { + timeout: string; +} | { + timeout?: undefined; +}; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| config | SharedGlobalConfig | | + +Returns: + +`{ + timeout: string; +} | { + timeout?: undefined; +}` + diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ifieldtype.md 
b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ifieldtype.md index 77a2954428f8d..d106f3a35a91c 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ifieldtype.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ifieldtype.md @@ -28,7 +28,7 @@ export interface IFieldType | [searchable](./kibana-plugin-plugins-data-server.ifieldtype.searchable.md) | boolean | | | [sortable](./kibana-plugin-plugins-data-server.ifieldtype.sortable.md) | boolean | | | [subType](./kibana-plugin-plugins-data-server.ifieldtype.subtype.md) | IFieldSubType | | -| [toSpec](./kibana-plugin-plugins-data-server.ifieldtype.tospec.md) | () => FieldSpec | | +| [toSpec](./kibana-plugin-plugins-data-server.ifieldtype.tospec.md) | (options?: {
getFormatterForField?: IndexPattern['getFormatterForField'];
}) => FieldSpec | | | [type](./kibana-plugin-plugins-data-server.ifieldtype.type.md) | string | | | [visualizable](./kibana-plugin-plugins-data-server.ifieldtype.visualizable.md) | boolean | | diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ifieldtype.tospec.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ifieldtype.tospec.md index d1863bebce4f0..6f8ee9d9eebf0 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ifieldtype.tospec.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ifieldtype.tospec.md @@ -7,5 +7,7 @@ Signature: ```typescript -toSpec?: () => FieldSpec; +toSpec?: (options?: { + getFormatterForField?: IndexPattern['getFormatterForField']; + }) => FieldSpec; ``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.abortsignal.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.abortsignal.md new file mode 100644 index 0000000000000..693345f480a9a --- /dev/null +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.abortsignal.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [ISearchOptions](./kibana-plugin-plugins-data-server.isearchoptions.md) > [abortSignal](./kibana-plugin-plugins-data-server.isearchoptions.abortsignal.md) + +## ISearchOptions.abortSignal property + +An `AbortSignal` that allows the caller of `search` to abort a search request. + +Signature: + +```typescript +abortSignal?: AbortSignal; +``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md index 002ce864a1aa4..21ddaef3a0b94 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md @@ -14,6 +14,6 @@ export interface ISearchOptions | Property | Type | Description | | --- | --- | --- | -| [signal](./kibana-plugin-plugins-data-server.isearchoptions.signal.md) | AbortSignal | An AbortSignal that allows the caller of search to abort a search request. | -| [strategy](./kibana-plugin-plugins-data-server.isearchoptions.strategy.md) | string | | +| [abortSignal](./kibana-plugin-plugins-data-server.isearchoptions.abortsignal.md) | AbortSignal | An AbortSignal that allows the caller of search to abort a search request. | +| [strategy](./kibana-plugin-plugins-data-server.isearchoptions.strategy.md) | string | Use this option to force using a specific server side search strategy. Leave empty to use the default strategy. 
| diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.signal.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.signal.md deleted file mode 100644 index 948dfd66da7a0..0000000000000 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.signal.md +++ /dev/null @@ -1,13 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [ISearchOptions](./kibana-plugin-plugins-data-server.isearchoptions.md) > [signal](./kibana-plugin-plugins-data-server.isearchoptions.signal.md) - -## ISearchOptions.signal property - -An `AbortSignal` that allows the caller of `search` to abort a search request. - -Signature: - -```typescript -signal?: AbortSignal; -``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.strategy.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.strategy.md index 6df72d023e2c0..65da7fddd13f6 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.strategy.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.strategy.md @@ -4,6 +4,8 @@ ## ISearchOptions.strategy property +Use this option to force using a specific server side search strategy. Leave empty to use the default strategy. + Signature: ```typescript diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.md index 62d954cb80eb7..577532d22b3d3 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.md @@ -16,5 +16,5 @@ export interface ISearchStartAggsStart | | | [getSearchStrategy](./kibana-plugin-plugins-data-server.isearchstart.getsearchstrategy.md) | (name: string) => ISearchStrategy<SearchStrategyRequest, SearchStrategyResponse> | Get other registered search strategies. For example, if a new strategy needs to use the already-registered ES search strategy, it can use this function to accomplish that. 
| -| [search](./kibana-plugin-plugins-data-server.isearchstart.search.md) | (context: RequestHandlerContext, request: IKibanaSearchRequest, options: ISearchOptions) => Promise<IKibanaSearchResponse> | | +| [search](./kibana-plugin-plugins-data-server.isearchstart.search.md) | (context: RequestHandlerContext, request: IEsSearchRequest, options: ISearchOptions) => Promise<IEsSearchResponse> | | diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.search.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.search.md index 1c2ae91699559..33ca818afc769 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.search.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.search.md @@ -7,5 +7,5 @@ Signature: ```typescript -search: (context: RequestHandlerContext, request: IKibanaSearchRequest, options: ISearchOptions) => Promise; +search: (context: RequestHandlerContext, request: IEsSearchRequest, options: ISearchOptions) => Promise; ``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.md index 0292e08063fbb..f5b587d86b349 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.md @@ -26,11 +26,13 @@ | Function | Description | | --- | --- | -| [getDefaultSearchParams(config)](./kibana-plugin-plugins-data-server.getdefaultsearchparams.md) | | +| [getDefaultSearchParams(uiSettingsClient)](./kibana-plugin-plugins-data-server.getdefaultsearchparams.md) | | +| [getShardTimeout(config)](./kibana-plugin-plugins-data-server.getshardtimeout.md) | | | [getTime(indexPattern, timeRange, options)](./kibana-plugin-plugins-data-server.gettime.md) | | | [parseInterval(interval)](./kibana-plugin-plugins-data-server.parseinterval.md) | | | [plugin(initializerContext)](./kibana-plugin-plugins-data-server.plugin.md) | Static code to be shared externally | | [shouldReadFieldFromDocValues(aggregatable, esType)](./kibana-plugin-plugins-data-server.shouldreadfieldfromdocvalues.md) | | +| [toSnakeCase(obj)](./kibana-plugin-plugins-data-server.tosnakecase.md) | | | [usageProvider(core)](./kibana-plugin-plugins-data-server.usageprovider.md) | | ## Interfaces @@ -71,6 +73,7 @@ | [AggGroupNames](./kibana-plugin-plugins-data-server.agggroupnames.md) | | | [castEsToKbnFieldTypeName](./kibana-plugin-plugins-data-server.castestokbnfieldtypename.md) | Get the KbnFieldType name for an esType string | | [config](./kibana-plugin-plugins-data-server.config.md) | | +| [ES\_SEARCH\_STRATEGY](./kibana-plugin-plugins-data-server.es_search_strategy.md) | | | [esFilters](./kibana-plugin-plugins-data-server.esfilters.md) | | | [esKuery](./kibana-plugin-plugins-data-server.eskuery.md) | | | [esQuery](./kibana-plugin-plugins-data-server.esquery.md) | | diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md index 2d9104ef894bc..455c5ecdd8195 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md @@ -8,7 +8,7 @@ ```typescript start(core: CoreStart): { - search: ISearchStart>; + search: ISearchStart>; 
fieldFormats: { fieldFormatServiceFactory: (uiSettings: import("../../../core/server").IUiSettingsClient) => Promise; }; @@ -27,7 +27,7 @@ start(core: CoreStart): { Returns: `{ - search: ISearchStart>; + search: ISearchStart>; fieldFormats: { fieldFormatServiceFactory: (uiSettings: import("../../../core/server").IUiSettingsClient) => Promise; }; diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.tosnakecase.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.tosnakecase.md new file mode 100644 index 0000000000000..eda9e9c312e59 --- /dev/null +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.tosnakecase.md @@ -0,0 +1,22 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [toSnakeCase](./kibana-plugin-plugins-data-server.tosnakecase.md) + +## toSnakeCase() function + +Signature: + +```typescript +export declare function toSnakeCase(obj: Record): import("lodash").Dictionary; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| obj | Record<string, any> | | + +Returns: + +`import("lodash").Dictionary` + diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ui_settings.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ui_settings.md index e419b64cd43aa..2d4ce75b956df 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ui_settings.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.ui_settings.md @@ -20,6 +20,7 @@ UI_SETTINGS: { readonly COURIER_MAX_CONCURRENT_SHARD_REQUESTS: "courier:maxConcurrentShardRequests"; readonly COURIER_BATCH_SEARCHES: "courier:batchSearches"; readonly SEARCH_INCLUDE_FROZEN: "search:includeFrozen"; + readonly SEARCH_TIMEOUT: "search:timeout"; readonly HISTOGRAM_BAR_TARGET: "histogram:barTarget"; readonly HISTOGRAM_MAX_BARS: "histogram:maxBars"; readonly HISTORY_LIMIT: "history:limit"; diff --git a/docs/discover/search.asciidoc b/docs/discover/search.asciidoc index eef2a12a964b8..da58382deb89a 100644 --- a/docs/discover/search.asciidoc +++ b/docs/discover/search.asciidoc @@ -1,7 +1,7 @@ [[search]] == Search data Many Kibana apps embed a query bar for real-time search, including -*Discover*, *Visualize*, and *Dashboard*. +*Discover* and *Dashboard*. [float] === Search your data @@ -84,7 +84,7 @@ query language you can also submit queries using the {ref}/query-dsl.html[Elasti [[save-open-search]] === Save a search -A saved search persists your current view of Discover for later retrieval and reuse. You can reload a saved search into Discover, add it to a dashboard, and use it as the basis for a <>. +A saved search persists your current view of Discover for later retrieval and reuse. You can reload a saved search into Discover, add it to a dashboard, and use it as the basis for a visualization. A saved search includes the query text, filters, and optionally, the time filter. A saved search also includes the selected columns in the document table, the sort order, and the current index pattern. @@ -120,7 +120,7 @@ used for the saved search will also be automatically selected. [[save-load-delete-query]] === Save a query -A saved query is a portable collection of query text and filters that you can reuse in <>, <>, and <>. Save a query when you want to: +A saved query is a portable collection of query text and filters that you can reuse in <> and <>. 
Save a query when you want to: * Retrieve results from the same query at a later time without having to reenter the query text, add the filters or set the time filter * View the results of the same query in multiple apps @@ -148,7 +148,7 @@ image::discover/images/saved-query-save-form-default-filters.png["Example of the . Click *Save*. ==== Load a query -To load a saved query into Discover, Dashboard, or Visualize: +To load a saved query into Discover or Dashboard: . Click *#* in the search bar, next to the query text input. . Select the query you want to load. You might need to scroll down to find the query you are looking for. diff --git a/docs/drilldowns/drilldowns.asciidoc b/docs/drilldowns/drilldowns.asciidoc deleted file mode 100644 index e2dfaa5af39ce..0000000000000 --- a/docs/drilldowns/drilldowns.asciidoc +++ /dev/null @@ -1,108 +0,0 @@ -[[drilldowns]] -== Use drilldowns for dashboard actions - -Drilldowns, also known as custom actions, allow you to configure a -workflow for analyzing and troubleshooting your data. -Using a drilldown, you can navigate from one dashboard to another, -taking the current time range, filters, and other parameters with you, -so the context remains the same. You can continue your analysis from a new perspective. - -For example, you might have a dashboard that shows the overall status of multiple data centers. -You can create a drilldown that navigates from this dashboard to a dashboard -that shows a single data center or server. - -[float] -[[how-drilldowns-work]] -=== How drilldowns work - -Drilldowns are user-configurable {kib} actions that are stored with the -dashboard metadata. Drilldowns are specific to the dashboard panel -for which you create them—they are not shared across panels. -A panel can have multiple drilldowns. - -This example shows a dashboard panel that contains a pie chart. -Typically, clicking a pie slice applies the current filter. -When a panel has a drilldown, clicking a pie slice opens a menu with -the default action and your drilldowns. Refer to the <> -for instructions on how to create this drilldown. - -[role="screenshot"] -image::images/drilldown_on_piechart.gif[Drilldown on pie chart that navigates to another dashboard] - -Third-party developers can create drilldowns. -Refer to https://github.com/elastic/kibana/tree/master/x-pack/examples/ui_actions_enhanced_examples[this example plugin] -to learn how to code drilldowns. - -[float] -[[create-manage-drilldowns]] -=== Create and manage drilldowns - -Your dashboard must be in *Edit* mode to create a drilldown. -Once a panel has at least one drilldown, the menu also includes a *Manage drilldowns* action -for editing and deleting drilldowns. - -[role="screenshot"] -image::images/drilldown_menu.png[Panel menu with Create drilldown and Manage drilldown actions] - -[float] -[[drilldowns-example]] -=== Try it: Create a drilldown - -This example shows how to create the *Host Overview* drilldown shown earlier in this doc. - -[float] -==== Set up the dashboards - -. Add the <> data set. - -. Create a new dashboard, called `Host Overview`, and include these visualizations -from the sample data set: -+ -[%hardbreaks] -*[Logs] Heatmap* -*[Logs] Visitors by OS* -*[Logs] Host, Visits, and Bytes Table* -*[Logs] Total Requests and Bytes* -+ -TIP: If you don’t see data for a panel, try changing the time range. - -. Open the *[Logs] Web traffic* dashboard. - -. Set a search and filter. 
-+ -[%hardbreaks] -Search: `extension.keyword:( “gz” or “css” or “deb”)` -Filter: `geo.src : CN` - -[float] -==== Create the drilldown - - -. In the dashboard menu bar, click *Edit*. - -. In *[Logs] Visitors by OS*, open the panel menu, and then select *Create drilldown*. - -. Give the drilldown a name. - -. Select *Host Overview* as the destination dashboard. - -. Keep both filters enabled so that the drilldown carries over the global filters and date range. -+ -Your input should look similar to this: -+ -[role="screenshot"] -image::images/drilldown_create.png[Create drilldown with entries for drilldown name and destination] - -. Click *Create drilldown.* - -. Save the dashboard. -+ -If you don’t save the drilldown, and then navigate away, the drilldown is lost. - -. In *[Logs] Visitors by OS*, click the `win 8` slice of the pie, and then select the name of your drilldown. -+ -[role="screenshot"] -image::images/drilldown_on_panel.png[Drilldown on pie chart that navigates to another dashboard] -+ -You are navigated to your destination dashboard. Verify that the search query, filters, -and time range are carried over. diff --git a/docs/drilldowns/explore-underlying-data.asciidoc b/docs/drilldowns/explore-underlying-data.asciidoc deleted file mode 100644 index c2bba599730d8..0000000000000 --- a/docs/drilldowns/explore-underlying-data.asciidoc +++ /dev/null @@ -1,41 +0,0 @@ -[[explore-underlying-data]] -== Explore the underlying data for a visualization - -++++ -Explore the underlying data -++++ - -Dashboard panels have an *Explore underlying data* action that navigates you to *Discover*, -where you can narrow your documents to the ones you'll most likely use in a visualization. -This action is available for visualizations backed by a single index pattern. - -You can access *Explore underlying data* in two ways: from the panel context -menu or from the menu that appears when you interact with the chart. - -[float] -[[explore-data-from-panel-context-menu]] -=== Explore data from panel context menu - -The *Explore underlying data* action in the panel menu navigates you to Discover, -carrying over the index pattern, filters, query, and time range for the visualization. - -[role="screenshot"] -image::images/explore_data_context_menu.png[Explore underlying data from panel context menu] - -[float] -[[explore-data-from-chart]] -=== Explore data from chart action - -Initiating *Explore underlying data* from the chart also navigates to Discover, -carrying over the current context for the visualization. In addition, this action -applies the filters and time range created by the events that triggered the action. - -[role="screenshot"] -image::images/explore_data_in_chart.png[Explore underlying data from chart] - -To enable this action add the following line to your `kibana.yml` config. - -["source","yml"] ------------ -xpack.discoverEnhanced.actions.exploreDataInChart.enabled: true ------------ diff --git a/docs/getting-started/add-sample-data.asciidoc b/docs/getting-started/add-sample-data.asciidoc deleted file mode 100644 index ab43431601888..0000000000000 --- a/docs/getting-started/add-sample-data.asciidoc +++ /dev/null @@ -1,28 +0,0 @@ -[[add-sample-data]] -== Add sample data - -{kib} has several sample data sets that you can use to explore {kib} before loading your own data. -These sample data sets showcase a variety of use cases: - -* *eCommerce orders* includes visualizations for product-related information, -such as cost, revenue, and price. 
-* *Flight data* enables you to view and interact with flight routes. -* *Web logs* lets you analyze website traffic. - -To get started, go to the {kib} home page and click the link underneath *Add sample data*. - -Once you've loaded a data set, click *View data* to view prepackaged -visualizations, dashboards, Canvas workpads, Maps, and Machine Learning jobs. - -[role="screenshot"] -image::images/add-sample-data.png[] - -NOTE: The timestamps in the sample data sets are relative to when they are installed. -If you uninstall and reinstall a data set, the timestamps will change to reflect the most recent installation. - -[float] -=== Next steps - -* Explore {kib} by following the <>. - -* Learn how to load data, define index patterns, and build visualizations by <>. diff --git a/docs/getting-started/images/gs_maps_time_filter.png b/docs/getting-started/images/gs_maps_time_filter.png new file mode 100644 index 0000000000000..83e20c279906e Binary files /dev/null and b/docs/getting-started/images/gs_maps_time_filter.png differ diff --git a/docs/getting-started/images/tutorial-discover-2.png b/docs/getting-started/images/tutorial-discover-2.png index 7190c90d8e5ba..681e4834de830 100644 Binary files a/docs/getting-started/images/tutorial-discover-2.png and b/docs/getting-started/images/tutorial-discover-2.png differ diff --git a/docs/getting-started/images/tutorial-pattern-1.png b/docs/getting-started/images/tutorial-pattern-1.png index 8a289f93fc66e..0026b18775518 100644 Binary files a/docs/getting-started/images/tutorial-pattern-1.png and b/docs/getting-started/images/tutorial-pattern-1.png differ diff --git a/docs/getting-started/images/tutorial-visualize-bar-1.5.png b/docs/getting-started/images/tutorial-visualize-bar-1.5.png index c02b9ca59dff5..009152f9407e4 100644 Binary files a/docs/getting-started/images/tutorial-visualize-bar-1.5.png and b/docs/getting-started/images/tutorial-visualize-bar-1.5.png differ diff --git a/docs/getting-started/images/tutorial-visualize-map-2.png b/docs/getting-started/images/tutorial-visualize-map-2.png index f4d1d0e47fe6a..ed2fd47cb27de 100644 Binary files a/docs/getting-started/images/tutorial-visualize-map-2.png and b/docs/getting-started/images/tutorial-visualize-map-2.png differ diff --git a/docs/getting-started/images/tutorial-visualize-md-2.png b/docs/getting-started/images/tutorial-visualize-md-2.png index 9e9a670ba196f..af56faa3b0516 100644 Binary files a/docs/getting-started/images/tutorial-visualize-md-2.png and b/docs/getting-started/images/tutorial-visualize-md-2.png differ diff --git a/docs/getting-started/images/tutorial-visualize-pie-2.png b/docs/getting-started/images/tutorial-visualize-pie-2.png index ef5d62b4ceee7..ca8f5e92146bc 100644 Binary files a/docs/getting-started/images/tutorial-visualize-pie-2.png and b/docs/getting-started/images/tutorial-visualize-pie-2.png differ diff --git a/docs/getting-started/images/tutorial-visualize-pie-3.png b/docs/getting-started/images/tutorial-visualize-pie-3.png index 6974c8d34b0dd..59fce360096c0 100644 Binary files a/docs/getting-started/images/tutorial-visualize-pie-3.png and b/docs/getting-started/images/tutorial-visualize-pie-3.png differ diff --git a/docs/getting-started/images/tutorial_index_patterns.png b/docs/getting-started/images/tutorial_index_patterns.png new file mode 100644 index 0000000000000..430baf898b612 Binary files /dev/null and b/docs/getting-started/images/tutorial_index_patterns.png differ diff --git a/docs/getting-started/tutorial-dashboard.asciidoc 
b/docs/getting-started/tutorial-dashboard.asciidoc deleted file mode 100644 index 2ee2d76024aed..0000000000000 --- a/docs/getting-started/tutorial-dashboard.asciidoc +++ /dev/null @@ -1,53 +0,0 @@ -[[tutorial-dashboard]] -=== Add the visualizations to a dashboard - -Build a dashboard that contains the visualizations and map that you saved during -this tutorial. - -. Open the menu, go to *Dashboard*, then click *Create dashboard*. -. Set the time filter to May 18, 2015 to May 20, 2015. -. Click *Add*, then select the following: - * *Bar Example* - * *Map Example* - * *Markdown Example* - * *Pie Example* -+ -Your sample dashboard looks like this: -+ -[role="screenshot"] -image::images/tutorial-dashboard.png[] - -. Try out the editing controls. -+ -You can rearrange the visualizations by clicking a the header of a -visualization and dragging. The gear icon in the top right of a visualization -displays controls for editing and deleting the visualization. A resize control -is on the lower right. - -. *Save* your dashboard. - -==== Inspect the data - -Seeing visualizations of your data is great, -but sometimes you need to look at the actual data to -understand what's really going on. You can inspect the data behind any visualization -and view the {es} query used to retrieve it. - -. Click the pie chart *Options* menu, then select *Inspect*. -+ -[role="screenshot"] -image::images/tutorial-full-inspect1.png[] - -. To look at the query used to fetch the data for the visualization, select *View > Requests*. - -[float] -=== Next steps - -Now that you have the basics, you're ready to start exploring -your own data with {kib}. - -* To learn about searching and filtering your data, refer to {kibana-ref}/discover.html[Discover]. -* To learn about the visualization types {kib} has to offer, refer to {kibana-ref}/visualize.html[Visualize]. -* To learn about configuring {kib} and managing your saved objects, refer to {kibana-ref}/management.html[Management]. -* To learn about the interactive console you can use to submit REST requests to {es}, refer to {kibana-ref}/console-kibana.html[Console]. - diff --git a/docs/getting-started/tutorial-define-index.asciidoc b/docs/getting-started/tutorial-define-index.asciidoc index 254befa55faea..fbe7450683dbc 100644 --- a/docs/getting-started/tutorial-define-index.asciidoc +++ b/docs/getting-started/tutorial-define-index.asciidoc @@ -1,7 +1,7 @@ [[tutorial-define-index]] === Define your index patterns -Index patterns tell Kibana which Elasticsearch indices you want to explore. +Index patterns tell {kib} which {es} indices you want to explore. An index pattern can match the name of a single index, or include a wildcard (*) to match multiple indices. @@ -10,28 +10,29 @@ series of indices in the format `logstash-YYYY.MMM.DD`. To explore all of the log data from May 2018, you could specify the index pattern `logstash-2018.05*`. - [float] -==== Create your first index pattern +==== Create the index patterns First you'll create index patterns for the Shakespeare data set, which has an index named `shakespeare,` and the accounts data set, which has an index named `bank`. These data sets don't contain time series data. . Open the menu, then go to *Stack Management > {kib} > Index Patterns*. + . If this is your first index pattern, the *Create index pattern* page opens. -Otherwise, click *Create index pattern*. -. In the *Index pattern field*, enter `shakes*`. + +. In the *Index pattern name* field, enter `shakes*`. 
+ [role="screenshot"] -image::images/tutorial-pattern-1.png[] +image::images/tutorial-pattern-1.png[shakes* index patterns] . Click *Next step*. + +. On the *Configure settings* page, click *Create index pattern*. + You’re presented a table of all fields and associated data types in the index. -. Return to the *Index patterns* page and create a second index pattern named `ba*`. +. Create a second index pattern named `ba*`. [float] ==== Create an index pattern for the time series data @@ -39,15 +40,12 @@ You’re presented a table of all fields and associated data types in the index. Create an index pattern for the Logstash index, which contains the time series data. -. Define an index pattern named `logstash*`. -. Click *Next step*. -. From the *Time Filter field name* dropdown, select *@timestamp*. -. Click *Create index pattern*. +. Create an index pattern named `logstash*`, then click *Next step*. -NOTE: When you define an index pattern, the indices that match that pattern must -exist in Elasticsearch and they must contain data. To check which indices are -available, open the menu, then go to *Dev Tools > Console* and enter `GET _cat/indices`. Alternately, use -`curl -XGET "http://localhost:9200/_cat/indices"`. +. From the *Time field* dropdown, select *@timestamp*, then click *Create index pattern*. ++ +[role="screenshot"] +image::images/tutorial_index_patterns.png[All tutorial index patterns] diff --git a/docs/getting-started/tutorial-discovering.asciidoc b/docs/getting-started/tutorial-discovering.asciidoc index 31d77be1275ee..ec07a74b8ac0d 100644 --- a/docs/getting-started/tutorial-discovering.asciidoc +++ b/docs/getting-started/tutorial-discovering.asciidoc @@ -1,9 +1,8 @@ -[[tutorial-discovering]] -=== Discover your data +[[explore-your-data]] +=== Explore your data -Using *Discover*, enter -an {ref}/query-dsl-query-string-query.html#query-string-syntax[Elasticsearch -query] to search your data and filter the results. +With *Discover*, you use {ref}/query-dsl-query-string-query.html#query-string-syntax[Elasticsearch +queries] to explore your data and narrow the results with filters. . Open the menu, then go to *Discover*. + @@ -13,7 +12,7 @@ The `shakes*` index pattern appears. + By default, all fields are shown for each matching document. -. In the *Search* field, enter the following: +. In the *Search* field, enter the following, then click *Update*: + [source,text] account_number<100 AND balance>47500 @@ -32,3 +31,5 @@ account numbers. + [role="screenshot"] image::images/tutorial-discover-3.png[] + +Now that you know what your documents contain, it's time to gain insight into your data with visualizations. diff --git a/docs/getting-started/tutorial-full-experience.asciidoc b/docs/getting-started/tutorial-full-experience.asciidoc index e6f2de87905bf..1e6fe39dbd013 100644 --- a/docs/getting-started/tutorial-full-experience.asciidoc +++ b/docs/getting-started/tutorial-full-experience.asciidoc @@ -1,32 +1,23 @@ -[[tutorial-build-dashboard]] -== Build your own dashboard +[[create-your-own-dashboard]] +== Create your own dashboard -Want to load some data into Kibana and build a dashboard? This tutorial shows you how to: +Ready to add data to {kib} and create your own dashboard? In this tutorial, you'll use three types of data sets that'll help you learn to: -* <> -* <> -* <> -* <> -* <> - -When you complete this tutorial, you'll have a dashboard that looks like this.
- -[role="screenshot"] -image::images/tutorial-dashboard.png[] +* <> +* <> +* <> +* <> [float] -[[tutorial-load-dataset]] -=== Load sample data +[[download-the-data]] +=== Download the data -This tutorial requires you to download three data sets: +To complete the tutorial, you'll download and use the following data sets: * The complete works of William Shakespeare, suitably parsed into fields -* A set of fictitious accounts with randomly generated data +* A set of fictitious bank accounts with randomly generated data * A set of randomly generated log files -[float] -==== Download the data sets - Create a new working directory where you want to download the files. From that directory, run the following commands: [source,shell] @@ -34,7 +25,7 @@ curl -O https://download.elastic.co/demos/kibana/gettingstarted/8.x/shakespeare. curl -O https://download.elastic.co/demos/kibana/gettingstarted/8.x/accounts.zip curl -O https://download.elastic.co/demos/kibana/gettingstarted/8.x/logs.jsonl.gz -Two of the data sets are compressed. To extract the files, use these commands: +Two of the data sets are compressed. To extract the files, use the following commands: [source,shell] unzip accounts.zip @@ -43,7 +34,7 @@ gunzip logs.jsonl.gz [float] ==== Structure of the data sets -The Shakespeare data set has this structure: +The Shakespeare data set has the following structure: [source,json] { @@ -55,7 +46,7 @@ The Shakespeare data set has this structure: "text_entry": "String", } -The accounts data set is structured as follows: +The accounts data set has the following structure: [source,json] { @@ -72,7 +63,7 @@ The accounts data set is structured as follows: "state": "String" } -The logs data set has dozens of different fields. Here are the notable fields for this tutorial: +The logs data set has dozens of different fields. The notable fields include the following: [source,json] { @@ -94,7 +85,7 @@ You must also have the `create`, `manage` `read`, `write,` and `delete` index privileges. See {ref}/security-privileges.html[Security privileges] for more information. -Open *Dev Tools*. On the *Console* page, set up a mapping for the Shakespeare data set: +Open the menu, then go to *Dev Tools*. On the *Console* page, set up a mapping for the Shakespeare data set: [source,js] PUT /shakespeare @@ -111,10 +102,11 @@ PUT /shakespeare //CONSOLE -This mapping specifies field characteristics for the data set: +The mapping specifies field characteristics for the data set: * The `speaker` and `play_name` fields are keyword fields. These fields are not analyzed. The strings are treated as a single unit even if they contain multiple words. + * The `line_id` and `speech_number` fields are integers. The logs data set requires a mapping to label the latitude and longitude pairs @@ -177,6 +169,7 @@ PUT /logstash-2015.05.20 The accounts data set doesn't require any mappings. [float] +[[load-the-data-sets]] ==== Load the data sets At this point, you're ready to use the Elasticsearch {ref}/docs-bulk.html[bulk] @@ -195,14 +188,20 @@ Invoke-RestMethod "http://:/_bulk?pretty" -Method Post -ContentType These commands might take some time to execute, depending on the available computing resources. -Verify successful loading: +When you define an index pattern, the indices that match the pattern must +exist in {es} and contain data. 
+ +To verify the availability of the indices, open the menu, go to *Dev Tools > Console*, then enter: [source,js] GET /_cat/indices?v -//CONSOLE +Alternately, use: + +[source,shell] +`curl -XGET "http://localhost:9200/_cat/indices"`. -Your output should look similar to this: +The output should look similar to: [source,shell] health status index pri rep docs.count docs.deleted store.size pri.store.size diff --git a/docs/getting-started/tutorial-sample-data.asciidoc b/docs/getting-started/tutorial-sample-data.asciidoc index 2460a55e13293..18ef862272f85 100644 --- a/docs/getting-started/tutorial-sample-data.asciidoc +++ b/docs/getting-started/tutorial-sample-data.asciidoc @@ -1,207 +1,159 @@ -[[tutorial-sample-data]] +[[explore-kibana-using-sample-data]] == Explore {kib} using sample data -Ready to get some hands-on experience with Kibana? -In this tutorial, you’ll work -with Kibana sample data and learn to: +Ready to get some hands-on experience with {kib}? +In this tutorial, you’ll work with {kib} sample data and learn to: -* <> -* <> -* <> -* <> +* <> +* <> -NOTE: If security is enabled, you must have `read`, `write`, and `manage` privileges -on the `kibana_sample_data_*` indices. See -{ref}/security-privileges.html[Security privileges] for more information. +* <> +NOTE: If security is enabled, you must have `read`, `write`, and `manage` privileges +on the `kibana_sample_data_*` indices. For more information, refer to +{ref}/security-privileges.html[Security privileges]. [float] -=== Add sample data +[[add-the-sample-data]] +=== Add the sample data -Install the Flights sample data set, if you haven't already. +Add the *Sample flight data*. . On the home page, click *Load a data set and a {kib} dashboard*. + . On the *Sample flight data* card, click *Add data*. -. Once the data is added, click *View data > Dashboard*. -+ -You’re taken to the *Global Flight* dashboard, a collection of charts, graphs, -maps, and other visualizations of the the data in the `kibana_sample_data_flights` index. -+ -[role="screenshot"] -image::getting-started/images/tutorial-sample-dashboard.png[] [float] -[[tutorial-sample-filter]] -=== Filter and query the data +[[explore-the-data]] +=== Explore the data -You can use filters and queries to -narrow the view of the data. -For more detailed information on these actions, see -{ref}/query-filter-context.html[Query and filter context]. +Explore the documents in the index that +match the selected index pattern. The index pattern tells {kib} which {es} index you want to +explore. -[float] -==== Filter the data +. Open the menu, then go to *Discover*. -. In the *Controls* visualization, select an *Origin City* and a *Destination City*. -. Click *Apply changes*. +. Make sure `kibana_sample_data_flights` is the current index pattern. +You might need to click *New* in the {kib} toolbar to refresh the data. + -The `OriginCityName` and the `DestCityName` fields filter the data on the dasbhoard to match -the data you specified. +You'll see a histogram that shows the distribution of +documents over time. A table lists the fields for +each document that matches the index. By default, all fields are shown. + -For example, the following dashboard shows the data for flights from London to Milan. +[role="screenshot"] +image::getting-started/images/tutorial-sample-discover1.png[] + +. Hover over the list of *Available fields*, then click *Add* next +to each field you want to explore in the table.
+ [role="screenshot"] -image::getting-started/images/tutorial-sample-filter.png[] +image::getting-started/images/tutorial-sample-discover2.png[] -. To add a filter manually, click *Add filter*, -then specify the data you want to view. +[float] +[[view-and-analyze-the-data]] +=== View and analyze the data -. When you are finished experimenting, remove all filters. +A _dashboard_ is a collection of panels that provide you with an overview of your data that you can +use to analyze your data. Panels contain everything you need, including visualizations, +interactive controls, Markdown, and more. + +To open the *Global Flight* dashboard, open the menu, then go to *Dashboard*. +[role="screenshot"] +image::getting-started/images/tutorial-sample-dashboard.png[] [float] -[[tutorial-sample-query]] -==== Query the data +[[change-the-panel-data]] +==== Change the panel data -. To find all flights out of Rome, enter this query in the query bar and click *Update*: -+ -[source,text] -OriginCityName:Rome +To gain insights into your data, change the appearance and behavior of the panels. +For example, edit the metric panel to find the airline that has the lowest average fares. -. For a more complex query with AND and OR, try this: -+ -[source,text] -OriginCityName:Rome AND (Carrier:JetBeats OR "Kibana Airlines") -+ -The dashboard updates to show data for the flights out of Rome on JetBeats and -{kib} Airlines. -+ -[role="screenshot"] -image::getting-started/images/tutorial-sample-query.png[] +. In the {kib} toolbar, click *Edit*. -. When you are finished exploring the dashboard, remove the query by -clearing the contents in the query bar and clicking *Update*. +. In the *Average Ticket Price* metric panel, open the panel menu, then select *Edit visualization*. -[float] -[[tutorial-sample-discover]] -=== Discover the data +. To change the data on the panel, use an {es} {ref}/search-aggregations.html[bucket aggregation], +which sorts the documents that match your search criteria into different categories or buckets. -In Discover, you have access to every document in every index that -matches the selected index pattern. The index pattern tells {kib} which {es} index you are currently -exploring. You can submit search queries, filter the -search results, and view document data. +.. In the *Buckets* pane, select *Add > Split group*. -. From the menu, click *Discover*. +.. From the *Aggregation* dropdown, select *Terms*. -. Ensure `kibana_sample_data_flights` is the current index pattern. -You might need to click *New* in the menu bar to refresh the data. +.. From the *Field* dropdown, select *Carrier*. + +.. Set *Descending* to *4*, then click *Update*. + -You'll see a histogram that shows the distribution of -documents over time. A table lists the fields for -each matching document. By default, all fields are shown. +The average ticket price for all four airlines appear in the visualization builder. + [role="screenshot"] -image::getting-started/images/tutorial-sample-discover1.png[] +image::getting-started/images/tutorial-sample-edit1.png[] -. To choose which fields to display, -hover the pointer over the list of *Available fields*, and then click *add* next -to each field you want include as a column in the table. -+ -For example, if you add the `DestAirportID` and `DestWeather` fields, -the display includes columns for those two fields. +. To save your changes, click *Save and return* in the {kib} toolbar. + +. To save the dashboard, click *Save* in the {kib} toolbar. 
+ [role="screenshot"] -image::getting-started/images/tutorial-sample-discover2.png[] +image::getting-started/images/tutorial-sample-edit2.png[] [float] -[[tutorial-sample-edit]] -=== Edit a visualization - -You have edit permissions for the *Global Flight* dashboard, so you can change -the appearance and behavior of the visualizations. For example, you might want -to see which airline has the lowest average fares. - -. In the side navigation, click *Recently viewed* and open the *Global Flight Dashboard*. -. In the menu bar, click *Edit*. -. In the *Average Ticket Price* visualization, click the gear icon in -the upper right. -. From the *Options* menu, select *Edit visualization*. -+ -*Average Ticket Price* is a metric visualization. -To specify which groups to display -in this visualization, you use an {es} {ref}/search-aggregations.html[bucket aggregation]. -This aggregation sorts the documents that match your search criteria into different -categories, or buckets. +[[filter-and-query-the-data]] +==== Filter and query the data -[float] -==== Create a bucket aggregation +To focus in on the data you want to explore, use filters and queries. +For more information, refer to +{ref}/query-filter-context.html[Query and filter context]. + +To filter the data: -. In the *Buckets* pane, select *Add > Split group*. -. In the *Aggregation* dropdown, select *Terms*. -. In the *Field* dropdown, select *Carrier*. -. Set *Descending* to *4*. -. Click *Apply changes* image:images/apply-changes-button.png[]. +. In the *Controls* visualization, select an *Origin City* and *Destination City*, then click *Apply changes*. + -You now see the average ticket price for all four airlines. +The `OriginCityName` and the `DestCityName` fields filter the data in the panels. + -[role="screenshot"] -image::getting-started/images/tutorial-sample-edit1.png[] - -[float] -==== Save the visualization - -. In the menu bar, click *Save*. -. Leave the visualization name as is and confirm the save. -. Go to the *Global Flight* dashboard and scroll the *Average Ticket Price* visualization to see the four prices. -. Optionally, edit the dashboard. Resize the panel -for the *Average Ticket Price* visualization by dragging the -handle in the lower right. You can also rearrange the visualizations by clicking -the header and dragging. Be sure to save the dashboard. +For example, the following dashboard shows the data for flights from London to Milan. + [role="screenshot"] -image::getting-started/images/tutorial-sample-edit2.png[] +image::getting-started/images/tutorial-sample-filter.png[] -[float] -[[tutorial-sample-inspect]] -=== Inspect the data +. To manually add a filter, click *Add filter*, +then specify the data you want to view. -Seeing visualizations of your data is great, -but sometimes you need to look at the actual data to -understand what's really going on. You can inspect the data behind any visualization -and view the {es} query used to retrieve it. +. When you are finished experimenting, remove all filters. -. In the dashboard, hover the pointer over the pie chart, and then click the icon in the upper right. -. From the *Options* menu, select *Inspect*. +[[query-the-data]] +To query the data: + +. To view all flights out of Rome, enter the following in the *KQL* query bar, then click *Update*: + -The initial view shows the document count. +[source,text] +OriginCityName: Rome + +. 
For a more complex query with AND and OR, enter: ++ +[source,text] +OriginCityName:Rome AND (Carrier:JetBeats OR Carrier:"Kibana Airlines") ++ +The dashboard panels update to display the flights out of Rome on JetBeats and +{kib} Airlines. + [role="screenshot"] -image::getting-started/images/tutorial-sample-inspect1.png[] - -. To look at the query used to fetch the data for the visualization, select *View > Requests* -in the upper right of the Inspect pane. - -[float] -[[tutorial-sample-remove]] -=== Remove the sample data set -When you’re done experimenting with the sample data set, you can remove it. +image::getting-started/images/tutorial-sample-query.png[] -. Go to the *Sample data* page. -. On the *Sample flight data* card, click *Remove*. +. When you are finished exploring, remove the query by +clearing the contents in the *KQL* query bar, then click *Update*. [float] === Next steps -Now that you have a handle on the {kib} basics, you might be interested in the -tutorial <>, where you'll learn to: +Now that you know the {kib} basics, try out the <> tutorial, where you'll learn to: + +* Add a data set to {kib} -* Load data * Define an index pattern -* Discover and explore data -* Create visualizations -* Add visualizations to a dashboard +* Discover and explore data +* Create and add panels to a dashboard diff --git a/docs/getting-started/tutorial-visualizing.asciidoc b/docs/getting-started/tutorial-visualizing.asciidoc index 20b4e33583072..33a7035160247 100644 --- a/docs/getting-started/tutorial-visualizing.asciidoc +++ b/docs/getting-started/tutorial-visualizing.asciidoc @@ -1,47 +1,76 @@ [[tutorial-visualizing]] === Visualize your data -In *Visualize*, you can shape your data using a variety -of charts, tables, and maps, and more. In this tutorial, you'll create four -visualizations: +Shape your data using a variety +of {kib} supported visualizations, tables, and more. In this tutorial, you'll create four +visualizations that you'll use to create a dashboard. -* <> -* <> -* <> -* <> +To begin, open the menu, go to *Dashboard*, then click *Create new dashboard*. [float] -[[tutorial-visualize-pie]] -=== Pie chart +[[compare-the-number-of-speaking-parts-in-the-play]] +=== Compare the number of speaking parts in the plays -Use the pie chart to -gain insight into the account balances in the bank account data. +To visualize the Shakespeare data and compare the number of speaking parts in the plays, create a bar chart using *Lens*. -. Open then menu, then go to *Visualize*. -. Click *Create visualization*. +. Click *Create new*, then click *Lens* on the *New Visualization* window. + [role="screenshot"] -image::images/tutorial-visualize-wizard-step-1.png[] -. Click *Pie*. +image::images/tutorial-visualize-wizard-step-1.png[Bar chart] -. On the *Choose a source* window, select `ba*`. +. Make sure the index pattern is *shakes*. + +. Display the play data along the x-axis. + +.. From the *Available fields* list, drag and drop *play_name* to the *X-axis* field. + +.. Click *Top values of play_name*. + +.. From the *Order direction* dropdown, select *Ascending*. + +.. In the *Label* field, enter `Play Name`. + +. Display the number of speaking parts per play along the y-axis. + +.. From the *Available fields* list, drag and drop *speaker* to the *Y-axis* field. + +.. Click *Unique count of speaker*. + +.. In the *Label* field, enter `Speaking Parts`. ++ +[role="screenshot"] +image::images/tutorial-visualize-bar-1.5.png[Bar chart] + +. *Save* the chart with the name `Bar Example`. 
+ -Initially, the pie contains a single "slice." -That's because the default search matches all documents. +To show a tooltip with the number of speaking parts for that play, hover over a bar. + -To specify which slices to display in the pie, you use an Elasticsearch -{ref}/search-aggregations.html[bucket aggregation]. This aggregation -sorts the documents that match your search criteria into different -categories. You'll use a bucket aggregation to establish -multiple ranges of account balances and find out how many accounts fall into -each range. +Notice how the individual play names show up as whole phrases, instead of +broken up into individual words. This is the result of the mapping +you did at the beginning of the tutorial, when you marked the `play_name` field +as `not analyzed`. -. In the *Buckets* pane, click *Add > Split slices.* +[float] +[[view-the-average-account-balance-by-age]] +=== View the average account balance by age + +To gain insight into the account balances in the bank account data, create a pie chart. In this tutorial, you'll use the {es} +{ref}/search-aggregations.html[bucket aggregation] to specify the pie slices to display. The bucket aggregation sorts the documents that match your search criteria into different +categories and establishes multiple ranges of account balances so that you can find how many accounts fall into each range. + +. Click *Create new*, then click *Pie* on the *New Visualization* window. + +. On the *Choose a source* window, select `ba*`. + +Since the default search matches all documents, the pie contains a single slice. + +. In the *Buckets* pane, click *Add > Split slices.* + .. From the *Aggregation* dropdown, select *Range*. + .. From the *Field* dropdown, select *balance*. -.. Click *Add range* four times to bring the total number of ranges to six. -.. Define the following ranges: + +.. Click *Add range* until there are six rows of fields, then define the following ranges: + [source,text] 0 999 @@ -53,80 +82,83 @@ each range. . Click *Update*. + -Now you can see what proportion of the 1000 accounts fall into each balance -range. +The pie chart displays the proportion of the 1,000 accounts that fall into each of the ranges. + [role="screenshot"] -image::images/tutorial-visualize-pie-2.png[] +image::images/tutorial-visualize-pie-2.png[Pie chart] -. Add another bucket aggregation that looks at the ages of the account -holders. +. Add another bucket aggregation that displays the ages of the account holders. .. In the *Buckets* pane, click *Add*, then click *Split slices*. + .. From the *Sub aggregation* dropdown, select *Terms*. -.. From the *Field* dropdown, select *age*. -. Click *Update*. +.. From the *Field* dropdown, select *age*, then click *Update*. + The break down of the ages of the account holders are displayed in a ring around the balance ranges. + [role="screenshot"] -image::images/tutorial-visualize-pie-3.png[] +image::images/tutorial-visualize-pie-3.png[Final pie chart] . Click *Save*, then enter `Pie Example` in the *Title* field. [float] -[[tutorial-visualize-bar]] -=== Bar chart +[role="xpack"] +[[visualize-geographic-information]] +=== Visualize geographic information -Use a bar chart to look at the Shakespeare data set and compare -the number of speaking parts in the plays. +To visualize geographic information in the log file data, use <>. -. Click *Create visualization > Vertical Bar*, then set the source to `shakes*`. +. Click *Create new*, then click *Maps* on the *New Visualization* window. + +. 
To change the time, use the time filter. + +.. Set the *Start date* to `May 18, 2015 @ 12:00:00.000`. + +.. Set the *End date* to `May 20, 2015 @ 12:00:00.000`. + -Initially, the chart is a single bar that shows the total count -of documents that match the default wildcard query. +[role="screenshot"] +image::images/gs_maps_time_filter.png[Time filter for Maps tutorial] -. Show the number of speaking parts per play along the y-axis. +.. Click *Update* + +. Map the geo coordinates from the log files. -.. In the *Metrics* pane, expand *Y-axis*. -.. From the *Aggregation* dropdown, select *Unique Count*. -.. From the *Field* dropdown, select *speaker*. -.. In the *Custom label* field, enter `Speaking Parts`. +.. Click *Add layer > Clusters and grids*. -. Click *Update*. +.. From the *Index pattern* dropdown, select *logstash*. -. Show the plays along the x-axis. +.. Click *Add layer*. -.. In the *Buckets* pane, click *Add > X-axis*. -.. From the *Aggregation* dropdown, select *Terms*. -.. From the *Field* dropdown, select *play_name*. -.. To list the plays alphabetically, select *Ascending* from the *Order* dropdown. -.. In the *Custom label* field, enter `Play Name`. +. Specify the *Layer Style*. -. Click *Update*. +.. From the *Fill color* dropdown, select the yellow to red color ramp. + +.. In the *Border width* field, enter `3`. + +.. From the *Border color* dropdown, select *#FFF*, then click *Save & close*. + [role="screenshot"] -image::images/tutorial-visualize-bar-1.5.png[] -. *Save* the chart with the name `Bar Example`. -+ -Hovering over a bar shows a tooltip with the number of speaking parts for -that play. -+ -Notice how the individual play names show up as whole phrases, instead of -broken into individual words. This is the result of the mapping -you did at the beginning of the tutorial, when you marked the `play_name` field -as `not analyzed`. +image::images/tutorial-visualize-map-2.png[Map] + +. Click *Save*, then enter `Map Example` in the *Title* field. + +. Add the map to your dashboard. + +.. Open the menu, go to *Dashboard*, then click *Add*. + +.. On the *Add panels* flyout, click *Map Example*. [float] [[tutorial-visualize-markdown]] -=== Markdown +=== Add context to your visualizations with Markdown -Add formatted text to your dashboard with a markdown tool. +Add context to your new visualizations with Markdown text. -. Click *Create visualization > Markdown*. -. In the text field, enter the following: +. Click *Create new*, then click *Markdown* on the *New Visualization* window. + +. In the *Markdown* text field, enter: + [source,markdown] # This is a tutorial dashboard! @@ -140,40 +172,22 @@ The Markdown renders in the preview pane. [role="screenshot"] image::images/tutorial-visualize-md-2.png[] -. *Save* the tool with the name `Markdown Example`. +. Click *Save*, then enter `Markdown Example` in the *Title* field. -[float] -[[tutorial-visualize-map]] -=== Map +[role="screenshot"] +image::images/tutorial-dashboard.png[] -Using <>, you can visualize geographic information in the log file sample data. +[float] +=== Next steps -. Click *Create visualization > Maps*. +Now that you have the basics, you're ready to start exploring your own system data with {kib}. -. Set the time. -.. In the time filter, click *Show dates*. -.. Click the start date, then *Absolute*. -.. Set the *Start date* to May 18, 2015. -.. Click *now*, then *Absolute*. -.. Set the *End date* to May 20, 2015. -.. Click *Update* +* To add your own data to {kib}, refer to <>. -. 
Map the geo coordinates from the log files. +* To search and filter your data, refer to {kibana-ref}/discover.html[Discover]. -.. Click *Add layer > Clusters and Grids*. -.. From the *Index pattern* dropdown, select *logstash*. -.. Click *Add layer*. +* To create a dashboard with your own data, refer to <>. -. Set the *Layer Style*. -.. From the *Fill color* dropdown, select the yellow to red color ramp. -.. From the *Border color* dropdown, select white. -.. Click *Save & close*. -+ -The map looks like this: -+ -[role="screenshot"] -image::images/tutorial-visualize-map-2.png[] +* To create maps that you can add to your dashboards, refer to <>. -. Navigate the map by clicking and dragging. Use the controls -to zoom the map and set filters. -. *Save* the map with the name `Map Example`. +* To create presentations of your live data, refer to <>. diff --git a/docs/glossary.asciidoc b/docs/glossary.asciidoc index 1edb33032418b..be24402170bbe 100644 --- a/docs/glossary.asciidoc +++ b/docs/glossary.asciidoc @@ -151,7 +151,7 @@ that you are interested in. A navigation path that retains context (time range and filters) from the source to the destination, so you can view the data from a new perspective. A dashboard that shows the overall status of multiple data centers -might have a drilldown to a dashboard for a single data center. See {kibana-ref}/drilldowns.html[Drilldowns]. +might have a drilldown to a dashboard for a single data center. See {kibana-ref}/dashboard.html[Drilldowns]. // end::drilldown-def[] @@ -238,7 +238,7 @@ support for scripted fields. See Enables you to build visualizations by dragging and dropping data fields. Lens makes makes smart visualization suggestions for your data, allowing you to switch between visualization types. -See {kibana-ref}/lens.html[Lens]. +See {kibana-ref}/dashboard.html[Lens]. // end::lens-def[] @@ -350,7 +350,7 @@ A {kib} control that constrains the search results to a particular time period. [[glossary-timelion]] Timelion :: // tag::timelion-def[] A tool for building a time series visualization that analyzes data in time order. -See {kibana-ref}/timelion.html[Timelion]. +See {kibana-ref}/dashboard.html[Timelion]. // end::timelion-def[] @@ -364,7 +364,7 @@ Timestamped data such as logs, metrics, and events that is indexed on an ongoing // tag::tsvb-def[] A time series data visualizer that allows you to combine an infinite number of aggregations to display complex data. -See {kibana-ref}/TSVB.html[TSVB]. +See {kibana-ref}/dashboard.html[TSVB]. // end::tsvb-def[] @@ -388,7 +388,7 @@ indices and guides you through resolving issues, including reindexing. See [[glossary-vega]] Vega :: // tag::vega-def[] A declarative language used to create interactive visualizations. -See {kibana-ref}/vega-graph.html[Vega]. +See {kibana-ref}/dashboard.html[Vega]. // end::vega-def[] [[glossary-vector]] vector data:: diff --git a/docs/management/advanced-options.asciidoc b/docs/management/advanced-options.asciidoc index 9f13c152b4cbe..ed20166c87f29 100644 --- a/docs/management/advanced-options.asciidoc +++ b/docs/management/advanced-options.asciidoc @@ -150,6 +150,12 @@ working on big documents. ==== Machine learning [horizontal] +`ml:anomalyDetection:results:enableTimeDefaults`:: Use the default time filter +in the *Single Metric Viewer* and *Anomaly Explorer*. If this setting is +disabled, the results for the full time range are shown. +`ml:anomalyDetection:results:timeDefaults`:: Sets the default time filter for +viewing {anomaly-job} results. 
This setting must contain `from` and `to` values (see {ref}/common-options.html#date-math[accepted formats]). It is ignored +unless `ml:anomalyDetection:results:enableTimeDefaults` is enabled. `ml:fileDataVisualizerMaxFileSize`:: Sets the file size limit when importing data in the {data-viz}. The default value is `100MB`. The highest supported value for this setting is `1GB`. @@ -219,6 +225,7 @@ be inconsistent because different shards might be in different refresh states. `search:includeFrozen`:: Includes {ref}/frozen-indices.html[frozen indices] in results. Searching through frozen indices might increase the search time. This setting is off by default. Users must opt-in to include frozen indices. +`search:timeout`:: Change the maximum timeout for a search session or set to 0 to disable the timeout and allow queries to run to completion. [float] [[kibana-siem-settings]] diff --git a/docs/management/index-patterns.asciidoc b/docs/management/index-patterns.asciidoc index 05036311c094c..7de2a042160e9 100644 --- a/docs/management/index-patterns.asciidoc +++ b/docs/management/index-patterns.asciidoc @@ -7,7 +7,7 @@ you want to work with. Once you create an index pattern, you're ready to: * Interactively explore your data in <>. -* Analyze your data in charts, tables, gauges, tag clouds, and more in <>. +* Analyze your data in charts, tables, gauges, tag clouds, and more in <>. * Show off your data in a <> workpad. * If your data includes geo data, visualize it with <>. diff --git a/docs/management/managing-saved-objects.asciidoc b/docs/management/managing-saved-objects.asciidoc index 51de5ad620b46..8c885ddca52e5 100644 --- a/docs/management/managing-saved-objects.asciidoc +++ b/docs/management/managing-saved-objects.asciidoc @@ -92,5 +92,5 @@ index pattern. This is useful if the index you were working with has been rename WARNING: Validation is not performed for object properties. Submitting an invalid change will render the object unusable. A more failsafe approach is to use -*Discover*, *Visualize*, or *Dashboard* to create new objects instead of +*Discover* or *Dashboard* to create new objects instead of directly editing an existing one. diff --git a/docs/management/numeral.asciidoc b/docs/management/numeral.asciidoc index 5d4d48ca785e1..a8834a3278a9e 100644 --- a/docs/management/numeral.asciidoc +++ b/docs/management/numeral.asciidoc @@ -10,7 +10,7 @@ Numeral formatting patterns are used in multiple places in {kib}, including: * <> * <> -* <> +* <> * <> The simplest pattern format is `0`, and the default {kib} pattern is `0,0.[000]`. diff --git a/docs/management/rollups/create_and_manage_rollups.asciidoc b/docs/management/rollups/create_and_manage_rollups.asciidoc index 831b536f8c1cb..8aa57f50fe94b 100644 --- a/docs/management/rollups/create_and_manage_rollups.asciidoc +++ b/docs/management/rollups/create_and_manage_rollups.asciidoc @@ -60,7 +60,7 @@ You can read more at {ref}/rollup-job-config.html[rollup job configuration]. === Try it: Create and visualize rolled up data This example creates a rollup job to capture log data from sample web logs. -To follow along, add the <>. +To follow along, add the <>. In this example, you want data that is older than 7 days in the target index pattern `kibana_sample_data_logs` to roll up once a day into the index `rollup_logstash`. You’ll bucket the @@ -145,7 +145,7 @@ is `rollup_logstash,kibana_sample_data_logs`. 
In this index pattern, `rollup_log matches the rolled up index pattern and `kibana_sample_data_logs` matches the index pattern for raw data. -. Go to *Visualize* and create a vertical bar chart. +. Go to *Dashboard* and create a vertical bar chart. . Choose `rollup_logstash,kibana_sample_data_logs` as your source to see both the raw and rolled up data. diff --git a/docs/redirects.asciidoc b/docs/redirects.asciidoc index 1a20c1df582e6..42d1d89145d79 100644 --- a/docs/redirects.asciidoc +++ b/docs/redirects.asciidoc @@ -59,7 +59,7 @@ This page has moved. Please see <>. [role="exclude",id="add-sample-data"] == Add sample data -This page has moved. Please see <>. +This page has moved. Please see <>. [role="exclude",id="tilemap"] == Coordinate map @@ -95,3 +95,8 @@ More information on this new feature is available in <>. == Role-based access control This content has moved to the <> page. + +[role="exclude",id="TSVB"] +== TSVB + +This page was deleted. See <>. diff --git a/docs/settings/alert-action-settings.asciidoc b/docs/settings/alert-action-settings.asciidoc index e02c7f212277e..13c1d20552fa1 100644 --- a/docs/settings/alert-action-settings.asciidoc +++ b/docs/settings/alert-action-settings.asciidoc @@ -37,12 +37,12 @@ You can configure the following settings in the `kibana.yml` file. [cols="2*<"] |=== -| `xpack.actions.whitelistedHosts` - | A list of hostnames that {kib} is allowed to connect to when built-in actions are triggered. It defaults to `[*]`, allowing any host, but keep in mind the potential for SSRF attacks when hosts are not explicitly whitelisted. An empty list `[]` can be used to block built-in actions from making any external connections. + +| `xpack.actions.allowedHosts` {ess-icon} + | A list of hostnames that {kib} is allowed to connect to when built-in actions are triggered. It defaults to `[*]`, allowing any host, but keep in mind the potential for SSRF attacks when hosts are not explicitly added to the allowed hosts. An empty list `[]` can be used to block built-in actions from making any external connections. + + - Note that hosts associated with built-in actions, such as Slack and PagerDuty, are not automatically whitelisted. If you are not using the default `[*]` setting, you must ensure that the corresponding endpoints are whitelisted as well. + Note that hosts associated with built-in actions, such as Slack and PagerDuty, are not automatically added to allowed hosts. If you are not using the default `[*]` setting, you must ensure that the corresponding endpoints are added to the allowed hosts as well. -| `xpack.actions.enabledActionTypes` +| `xpack.actions.enabledActionTypes` {ess-icon} | A list of action types that are enabled. It defaults to `[*]`, enabling all types. The names for built-in {kib} action types are prefixed with a `.` and include: `.server-log`, `.slack`, `.email`, `.index`, `.pagerduty`, and `.webhook`. An empty list `[]` will disable all action types. + + Disabled action types will not appear as an option when creating new connectors, but existing connectors and actions of that type will remain in {kib} and will not function. diff --git a/docs/settings/dev-settings.asciidoc b/docs/settings/dev-settings.asciidoc index e92e9c2928793..62553293a7d03 100644 --- a/docs/settings/dev-settings.asciidoc +++ b/docs/settings/dev-settings.asciidoc @@ -14,7 +14,7 @@ They are enabled by default. [cols="2*<"] |=== -| `xpack.grokdebugger.enabled` +| `xpack.grokdebugger.enabled` {ess-icon} | Set to `true` to enable the <>. Defaults to `true`. 
|=== diff --git a/docs/settings/graph-settings.asciidoc b/docs/settings/graph-settings.asciidoc index a66785242c19a..876e3dc936ccf 100644 --- a/docs/settings/graph-settings.asciidoc +++ b/docs/settings/graph-settings.asciidoc @@ -13,7 +13,7 @@ You do not need to configure any settings to use the {graph-features}. [cols="2*<"] |=== -| `xpack.graph.enabled` +| `xpack.graph.enabled` {ess-icon} | Set to `false` to disable the {graph-features}. |=== diff --git a/docs/settings/monitoring-settings.asciidoc b/docs/settings/monitoring-settings.asciidoc index d538519eefcc4..6c8632efa9cc0 100644 --- a/docs/settings/monitoring-settings.asciidoc +++ b/docs/settings/monitoring-settings.asciidoc @@ -37,6 +37,11 @@ For more information, see monitoring back-end does not run and {kib} stats are not sent to the monitoring cluster. +a|`monitoring.cluster_alerts.` +`email_notifications.email_address` {ess-icon} + | Specifies the email address where you want to receive cluster alerts. + See <> for details. + | `monitoring.ui.elasticsearch.hosts` | Specifies the location of the {es} cluster where your monitoring data is stored. By default, this is the same as `elasticsearch.hosts`. This setting enables @@ -85,7 +90,7 @@ These settings control how data is collected from {kib}. | Set to `true` (default) to enable data collection from the {kib} NodeJS server for {kib} dashboards to be featured in *{stack-monitor-app}*. -| `monitoring.kibana.collection.interval` +| `monitoring.kibana.collection.interval` {ess-icon} | Specifies the number of milliseconds to wait in between data sampling on the {kib} NodeJS server for the metrics that are displayed in the {kib} dashboards. Defaults to `10000` (10 seconds). @@ -111,7 +116,7 @@ about configuring {kib}, see | Set to `false` to hide *{stack-monitor-app}*. The monitoring back-end continues to run as an agent for sending {kib} stats to the monitoring cluster. Defaults to `true`. - + | `monitoring.ui.logs.index` | Specifies the name of the indices that are shown on the <> page in *{stack-monitor-app}*. The default value @@ -124,7 +129,7 @@ about configuring {kib}, see {ref}/search-aggregations-bucket-terms-aggregation.html#search-aggregations-bucket-terms-aggregation-size[Terms Aggregation]. Defaults to `10000`. -| `monitoring.ui.min_interval_seconds` +| `monitoring.ui.min_interval_seconds` {ess-icon} | Specifies the minimum number of seconds that a time bucket in a chart can represent. Defaults to 10. If you modify the `monitoring.ui.collection.interval` in `elasticsearch.yml`, use the same @@ -143,7 +148,7 @@ container, then Cgroup statistics are not useful. [cols="2*<"] |=== -| `monitoring.ui.container.elasticsearch.enabled` +| `monitoring.ui.container.elasticsearch.enabled` {ess-icon} | For {es} clusters that are running in containers, this setting changes the *Node Listing* to display the CPU utilization based on the reported Cgroup statistics. It also adds the calculated Cgroup CPU utilization to the diff --git a/docs/settings/reporting-settings.asciidoc b/docs/settings/reporting-settings.asciidoc index 0b6f94e86a39f..9c8d753a2d668 100644 --- a/docs/settings/reporting-settings.asciidoc +++ b/docs/settings/reporting-settings.asciidoc @@ -17,10 +17,10 @@ You can configure `xpack.reporting` settings in your `kibana.yml` to: [cols="2*<"] |=== -| [[xpack-enable-reporting]]`xpack.reporting.enabled` +| [[xpack-enable-reporting]]`xpack.reporting.enabled` {ess-icon} | Set to `false` to disable the {report-features}. 
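As a hedged sketch of how the monitoring settings listed above fit together in `kibana.yml` (the email address and values are placeholders, not recommendations):

["source","yml"]
-----------
# Receive cluster alert notifications at this address.
monitoring.cluster_alerts.email_notifications.email_address: "ops@example.com"
# Sample Kibana metrics every 10 seconds (the default).
monitoring.kibana.collection.interval: 10000
# Never draw chart buckets smaller than 10 seconds.
monitoring.ui.min_interval_seconds: 10
# Report Cgroup-based CPU utilization for containerized Elasticsearch nodes.
monitoring.ui.container.elasticsearch.enabled: true
-----------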
-| `xpack.reporting.encryptionKey`
+| `xpack.reporting.encryptionKey` {ess-icon}
| Set to an alphanumeric, at least 32 characters long text string. By default, {kib} will generate a random key when it starts, which will cause pending reports to fail after restart. Configure this setting to preserve the same key across multiple restarts and multiple instances of {kib}.
@@ -86,7 +86,7 @@ reports, you might need to change the following settings.
| How often the index that stores reporting jobs rolls over to a new index. Valid values are `year`, `month`, `week`, `day`, and `hour`. Defaults to `week`.
-| `xpack.reporting.queue.pollEnabled`
+| `xpack.reporting.queue.pollEnabled` {ess-icon}
| Set to `true` (default) to enable the {kib} instance to poll the index for pending jobs and claim them for execution. Setting this to `false` allows the {kib} instance to only add new jobs to the reporting queue, list jobs, and
@@ -107,7 +107,7 @@ security is enabled, `xpack.security.encryptionKey`.
| Specifies the number of milliseconds that the reporting poller waits between polling the index for any pending Reporting jobs. Defaults to `3000` (3 seconds).
-| [[xpack-reporting-q-timeout]] `xpack.reporting.queue.timeout`
+| [[xpack-reporting-q-timeout]] `xpack.reporting.queue.timeout` {ess-icon}
| How long each worker has to produce a report. If your machine is slow or under heavy load, you might need to increase this timeout. Specified in milliseconds. If a Reporting job execution time goes over this time limit, the job will be
@@ -125,19 +125,22 @@ control the capturing process.
[cols="2*<"] |===
-| `xpack.reporting.capture.timeouts.openUrl`
+a| `xpack.reporting.capture.timeouts` +`.openUrl` {ess-icon}
| Specify how long to allow the Reporting browser to wait for the "Loading..." screen to dismiss and find the initial data for the Kibana page. If the time is exceeded, a page screenshot is captured showing the current state, and the download link shows a warning message. Defaults to `60000` (1 minute).
-| `xpack.reporting.capture.timeouts.waitForElements`
+a| `xpack.reporting.capture.timeouts` +`.waitForElements` {ess-icon}
| Specify how long to allow the Reporting browser to wait for all visualization panels to load on the Kibana page. If the time is exceeded, a page screenshot is captured showing the current state, and the download link shows a warning message. Defaults to `30000` (30 seconds).
-| `xpack.reporting.capture.timeouts.renderComplete`
+a| `xpack.reporting.capture.timeouts` +`.renderComplete` {ess-icon}
| Specify how long to allow the Reporting browser to wait for all visualizations to fetch and render the data. If the time is exceeded, a page screenshot is captured showing the current state, and the download link shows a warning message. Defaults to
@@ -155,7 +158,7 @@ available, but there will likely be errors in the visualizations in the report.
[cols="2*<"] |===
-| `xpack.reporting.capture.maxAttempts`
+| `xpack.reporting.capture.maxAttempts` {ess-icon}
| If capturing a report fails for any reason, {kib} will re-attempt the reporting job, as many times as this setting. Defaults to `3`.
@@ -166,7 +169,7 @@ available, but there will likely be errors in the visualizations in the report.
visualizations, try increasing this value. Defaults to `3000` (3 seconds).
-| [[xpack-reporting-browser]] `xpack.reporting.capture.browser.type`
+| [[xpack-reporting-browser]] `xpack.reporting.capture.browser.type` {ess-icon}
| Specifies the browser to use to capture screenshots.
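A minimal `kibana.yml` sketch of the reporting settings covered so far; the key and timeout values are illustrative only, not recommendations:

["source","yml"]
-----------
# Persist one encryption key across restarts and instances (any string of 32+ characters).
xpack.reporting.encryptionKey: "something_secret_and_at_least_32_chars"
# Keep polling for pending report jobs (the default).
xpack.reporting.queue.pollEnabled: true
# Give each report job up to 2 minutes (in milliseconds) before it fails.
xpack.reporting.queue.timeout: 120000
# Retry a failed capture up to 3 times (the default).
xpack.reporting.capture.maxAttempts: 3
# Capture timeouts for slow dashboards (milliseconds).
xpack.reporting.capture.timeouts.openUrl: 60000
xpack.reporting.capture.timeouts.waitForElements: 30000
xpack.reporting.capture.timeouts.renderComplete: 30000
-----------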
This setting exists for backward compatibility. The only valid option is `chromium`.
@@ -180,20 +183,24 @@ When `xpack.reporting.capture.browser.type` is set to `chromium` (default) you c
[cols="2*<"] |===
-| `xpack.reporting.capture.browser.chromium.disableSandbox`
+a| `xpack.reporting.capture.browser` +`.chromium.disableSandbox`
| It is recommended that you research the feasibility of enabling unprivileged user namespaces. See Chromium Sandbox for additional information. Defaults to false for all operating systems except Debian, Red Hat Linux, and CentOS which use true.
-| `xpack.reporting.capture.browser.chromium.proxy.enabled`
+a| `xpack.reporting.capture.browser` +`.chromium.proxy.enabled`
| Enables the proxy for Chromium to use. When set to `true`, you must also specify the `xpack.reporting.capture.browser.chromium.proxy.server` setting. Defaults to `false`.
-| `xpack.reporting.capture.browser.chromium.proxy.server`
+a| `xpack.reporting.capture.browser` +`.chromium.proxy.server`
| The uri for the proxy server. Providing the username and password for the proxy server via the uri is not supported.
-| `xpack.reporting.capture.browser.chromium.proxy.bypass`
+a| `xpack.reporting.capture.browser` +`.chromium.proxy.bypass`
| An array of hosts that should not go through the proxy server and should use a direct connection instead. Examples of valid entries are "elastic.co", "*.elastic.co", ".elastic.co", ".elastic.co:5601".
@@ -205,27 +212,27 @@ When `xpack.reporting.capture.browser.type` is set to `chromium` (default) you c
[cols="2*<"] |===
-| [[xpack-reporting-csv]] `xpack.reporting.csv.maxSizeBytes`
+| [[xpack-reporting-csv]] `xpack.reporting.csv.maxSizeBytes` {ess-icon}
| The maximum size of a CSV file before being truncated. This setting exists to prevent large exports from causing performance and storage issues. Defaults to `10485760` (10MB).
| `xpack.reporting.csv.scroll.size`
- | Number of documents retrieved from {es} for each scroll iteration during a CSV
+ | Number of documents retrieved from {es} for each scroll iteration during a CSV
export. Defaults to `500`.
| `xpack.reporting.csv.scroll.duration`
| Amount of time allowed before {kib} cleans the scroll context during a CSV export. Defaults to `30s`.
- +
| `xpack.reporting.csv.checkForFormulas`
| Enables a check that warns you when there's a potential formula involved in the output (=, -, +, and @ chars). See OWASP: https://www.owasp.org/index.php/CSV_Injection Defaults to `true`.
- +
| `xpack.reporting.csv.enablePanelActionDownload`
- | Enables CSV export from a saved search on a dashboard. This action is available in the dashboard
+ | Enables CSV export from a saved search on a dashboard. This action is available in the dashboard
panel menu for the saved search. Defaults to `true`.
diff --git a/docs/settings/security-settings.asciidoc b/docs/settings/security-settings.asciidoc index a0995cab984d4..b6eecc6ea9f04 100644 --- a/docs/settings/security-settings.asciidoc +++ b/docs/settings/security-settings.asciidoc @@ -73,27 +73,27 @@ The valid settings in the `xpack.security.authc.providers` namespace vary depend
[cols="2*<"] |===
| `xpack.security.authc.providers.`
-`..enabled`
+`..enabled` {ess-icon}
| Determines if the authentication provider should be enabled. By default, {kib} enables the provider as soon as you configure any of its properties.
| `xpack.security.authc.providers.`
-`..order`
+`..order` {ess-icon}
| Order of the provider in the authentication chain and on the Login Selector UI.
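The Chromium proxy and CSV export settings described above might look like this in `kibana.yml`; the proxy URI and bypass list are placeholder values:

["source","yml"]
-----------
# Route the headless Chromium browser through a proxy.
xpack.reporting.capture.browser.chromium.proxy.enabled: true
xpack.reporting.capture.browser.chromium.proxy.server: "http://proxy.example.com:8080"
xpack.reporting.capture.browser.chromium.proxy.bypass: ["elastic.co", "*.elastic.co"]
# Truncate CSV exports larger than 10 MB (the default) and fetch 500 documents per scroll.
xpack.reporting.csv.maxSizeBytes: 10485760
xpack.reporting.csv.scroll.size: 500
-----------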
| `xpack.security.authc.providers.` -`..description` +`..description` {ess-icon} | Custom description of the provider entry displayed on the Login Selector UI. | `xpack.security.authc.providers.` -`..hint` +`..hint` {ess-icon} | Custom hint for the provider entry displayed on the Login Selector UI. | `xpack.security.authc.providers.` -`..icon` +`..icon` {ess-icon} | Custom icon for the provider entry displayed on the Login Selector UI. | `xpack.security.authc.providers.` -`..showInSelector` +`..showInSelector` {ess-icon} | Flag that indicates if the provider should have an entry on the Login Selector UI. Setting this to `false` doesn't remove the provider from the authentication chain. 2+a| @@ -104,7 +104,7 @@ You are unable to set this setting to `false` for `basic` and `token` authentica ============ | `xpack.security.authc.providers.` -`..accessAgreement.message` +`..accessAgreement.message` {ess-icon} | Access agreement text in Markdown format. For more information, refer to <>. |=== @@ -118,11 +118,11 @@ In addition to <.realm` +`saml..realm` {ess-icon} | SAML realm in {es} that provider should use. | `xpack.security.authc.providers.` -`saml..useRelayStateDeepLink` +`saml..useRelayStateDeepLink` {ess-icon} | Determines if the provider should treat the `RelayState` parameter as a deep link in {kib} during Identity Provider initiated log in. By default, this setting is set to `false`. The link specified in `RelayState` should be a relative, URL-encoded {kib} URL. For example, the `/app/dashboards#/list` link in `RelayState` parameter would look like this: `RelayState=%2Fapp%2Fdashboards%23%2Flist`. |=== @@ -136,7 +136,7 @@ In addition to <.realm` +`oidc..realm` {ess-icon} | OpenID Connect realm in {es} that the provider should use. |=== @@ -168,13 +168,13 @@ You can configure the following settings in the `kibana.yml` file. [cols="2*<"] |=== -| `xpack.security.loginAssistanceMessage` +| `xpack.security.loginAssistanceMessage` {ess-icon} | Adds a message to the login UI. Useful for displaying information about maintenance windows, links to corporate sign up pages, and so on. -| `xpack.security.loginHelp` +| `xpack.security.loginHelp` {ess-icon} | Adds a message accessible at the login UI with additional help information for the login process. -| `xpack.security.authc.selector.enabled` +| `xpack.security.authc.selector.enabled` {ess-icon} | Determines if the login selector UI should be enabled. By default, this setting is set to `true` if more than one authentication provider is configured. |=== @@ -203,12 +203,12 @@ You can configure the following settings in the `kibana.yml` file. this to `true` if SSL is configured outside of {kib} (for example, you are routing requests through a load balancer or proxy). -| `xpack.security.sameSiteCookies` +| `xpack.security.sameSiteCookies` {ess-icon} | Sets the `SameSite` attribute of the session cookie. This allows you to declare whether your cookie should be restricted to a first-party or same-site context. Valid values are `Strict`, `Lax`, `None`. This is *not set* by default, which modern browsers will treat as `Lax`. If you use Kibana embedded in an iframe in modern browsers, you might need to set it to `None`. Setting this value to `None` requires cookies to be sent over a secure connection by setting `xpack.security.secureCookies: true`. -| `xpack.security.session.idleTimeout` +| `xpack.security.session.idleTimeout` {ess-icon} | Ensures that user sessions will expire after a period of inactivity. 
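The provider settings above are keyed by provider type and provider name. The following is a hedged `kibana.yml` sketch with one basic and one SAML provider; the provider names `basic1` and `saml1`, the realm name, and the messages are assumptions for illustration only:

["source","yml"]
-----------
xpack.security.authc.providers:
  # Basic provider shown first in the authentication chain.
  basic.basic1:
    order: 0
  # SAML provider backed by an Elasticsearch SAML realm.
  saml.saml1:
    order: 1
    realm: "saml1"
    description: "Log in with corporate SSO"
# Show the Login Selector UI (enabled by default when more than one provider is configured).
xpack.security.authc.selector.enabled: true
# Extra guidance shown on the login page.
xpack.security.loginAssistanceMessage: "Need an account? Contact your administrator."
-----------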
This and `xpack.security.session.lifespan` are both highly recommended. By default, this setting is not set. @@ -218,7 +218,7 @@ highly recommended. By default, this setting is not set. The format is a string of `[ms\|s\|m\|h\|d\|w\|M\|Y]` (e.g. '20m', '24h', '7d', '1w'). ============ -| `xpack.security.session.lifespan` +| `xpack.security.session.lifespan` {ess-icon} | Ensures that user sessions will expire after the defined time period. This behavior also known as an "absolute timeout". If this is _not_ set, user sessions could stay active indefinitely. This and `xpack.security.session.idleTimeout` are both highly recommended. By default, this setting is not set. diff --git a/docs/setup/connect-to-elasticsearch.asciidoc b/docs/setup/connect-to-elasticsearch.asciidoc index f750784c47043..ea02afb8a9fda 100644 --- a/docs/setup/connect-to-elasticsearch.asciidoc +++ b/docs/setup/connect-to-elasticsearch.asciidoc @@ -11,7 +11,7 @@ To start working with your data in {kib}, you can: * Connect {kib} with existing {es} indices. -If you're not ready to use your own data, you can add a <> +If you're not ready to use your own data, you can add a <> to see all that you can do in {kib}. [float] diff --git a/docs/setup/settings.asciidoc b/docs/setup/settings.asciidoc index 4a931aabd3646..f03022e9e9f00 100644 --- a/docs/setup/settings.asciidoc +++ b/docs/setup/settings.asciidoc @@ -20,12 +20,12 @@ which may cause a delay before pages start being served. Set to `false` to disable Console. *Default: `true`* | `cpu.cgroup.path.override:` - | Override for cgroup cpu path when mounted in a -manner that is inconsistent with `/proc/self/cgroup`. + | *deprecated* This setting has been renamed to `ops.cGroupOverrides.cpuPath` +and the old name will no longer be supported as of 8.0. | `cpuacct.cgroup.path.override:` - | Override for cgroup cpuacct path when mounted -in a manner that is inconsistent with `/proc/self/cgroup`. + | *deprecated* This setting has been renamed to `ops.cGroupOverrides.cpuAcctPath` +and the old name will no longer be supported as of 8.0. | `csp.rules:` | A https://w3c.github.io/webappsec-csp/[content-security-policy] template @@ -438,6 +438,14 @@ not saved in {es}. *Default: `data`* | Set the interval in milliseconds to sample system and process performance metrics. The minimum value is 100. *Default: `5000`* +| `ops.cGroupOverrides.cpuPath:` + | Override for cgroup cpu path when mounted in a +manner that is inconsistent with `/proc/self/cgroup`. + +| `ops.cGroupOverrides.cpuAcctPath:` + | Override for cgroup cpuacct path when mounted +in a manner that is inconsistent with `/proc/self/cgroup`. + | `server.basePath:` | Enables you to specify a path to mount {kib} at if you are running behind a proxy. Use the `server.rewriteBasePath` setting to tell {kib} diff --git a/docs/user/alerting/action-types.asciidoc b/docs/user/alerting/action-types.asciidoc index 1743edb10f92b..be31458ff39fa 100644 --- a/docs/user/alerting/action-types.asciidoc +++ b/docs/user/alerting/action-types.asciidoc @@ -25,7 +25,7 @@ a| <> a| <> -| Push or update data to a new incident in ServiceNow. +| Create an incident in ServiceNow. a| <> diff --git a/docs/user/alerting/action-types/email.asciidoc b/docs/user/alerting/action-types/email.asciidoc index f6a02b9038c02..83e7edc5a016a 100644 --- a/docs/user/alerting/action-types/email.asciidoc +++ b/docs/user/alerting/action-types/email.asciidoc @@ -12,7 +12,7 @@ Email connectors have the following configuration properties: Name:: The name of the connector. 
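Pulling together the session, cookie, and cgroup settings above, a `kibana.yml` might include the following; the durations and paths are placeholder values, not recommendations:

["source","yml"]
-----------
# Expire sessions after 1 hour of inactivity and after 30 days overall.
xpack.security.session.idleTimeout: "1h"
xpack.security.session.lifespan: "30d"
# SameSite=None requires cookies to be sent over a secure connection.
xpack.security.sameSiteCookies: "None"
xpack.security.secureCookies: true
# Replacements for the deprecated cgroup path overrides (paths here are placeholders).
ops.cGroupOverrides.cpuPath: "/custom/cgroup/cpu"
ops.cGroupOverrides.cpuAcctPath: "/custom/cgroup/cpuacct"
-----------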
The name is used to identify a connector in the management UI connector listing, or in the connector list when configuring an action. Sender:: The from address for all emails sent with this connector, specified in `user@host-name` format. -Host:: Host name of the service provider. If you are using the <> setting, make sure this hostname is whitelisted. +Host:: Host name of the service provider. If you are using the <> setting, make sure this hostname is added to the allowed hosts. Port:: The port to connect to on the service provider. Secure:: If true the connection will use TLS when connecting to the service provider. See https://nodemailer.com/smtp/#tls-options[nodemailer TLS documentation] for more information. Username:: username for 'login' type authentication. diff --git a/docs/user/alerting/action-types/pagerduty.asciidoc b/docs/user/alerting/action-types/pagerduty.asciidoc index 5fd85a1045265..2c9add5233c91 100644 --- a/docs/user/alerting/action-types/pagerduty.asciidoc +++ b/docs/user/alerting/action-types/pagerduty.asciidoc @@ -132,7 +132,7 @@ This is an irreversible action and impacts all alerts that use this connector. PagerDuty connectors have the following configuration properties: Name:: The name of the connector. The name is used to identify a connector in the management UI connector listing, or in the connector list when configuring an action. -API URL:: An optional PagerDuty event URL. Defaults to `https://events.pagerduty.com/v2/enqueue`. If you are using the <> setting, make sure the hostname is whitelisted. +API URL:: An optional PagerDuty event URL. Defaults to `https://events.pagerduty.com/v2/enqueue`. If you are using the <> setting, make sure the hostname is added to the allowed hosts. Integration Key:: A 32 character PagerDuty Integration Key for an integration on a service, also referred to as the routing key. [float] diff --git a/docs/user/alerting/action-types/slack.asciidoc b/docs/user/alerting/action-types/slack.asciidoc index 99bf73c0f5597..a1fe7a2521b22 100644 --- a/docs/user/alerting/action-types/slack.asciidoc +++ b/docs/user/alerting/action-types/slack.asciidoc @@ -11,7 +11,7 @@ The Slack action type uses https://api.slack.com/incoming-webhooks[Slack Incomin Slack connectors have the following configuration properties: Name:: The name of the connector. The name is used to identify a connector in the management UI connector listing, or in the connector list when configuring an action. -Webhook URL:: The URL of the incoming webhook. See https://api.slack.com/messaging/webhooks#getting_started[Slack Incoming Webhooks] for instructions on generating this URL. If you are using the <> setting, make sure the hostname is whitelisted. +Webhook URL:: The URL of the incoming webhook. See https://api.slack.com/messaging/webhooks#getting_started[Slack Incoming Webhooks] for instructions on generating this URL. If you are using the <> setting, make sure the hostname is added to the allowed hosts. [float] [[Preconfigured-slack-configuration]] diff --git a/docs/user/alerting/action-types/webhook.asciidoc b/docs/user/alerting/action-types/webhook.asciidoc index c91c24430e982..659c3afad6bd1 100644 --- a/docs/user/alerting/action-types/webhook.asciidoc +++ b/docs/user/alerting/action-types/webhook.asciidoc @@ -11,7 +11,7 @@ The Webhook action type uses https://github.com/axios/axios[axios] to send a POS Webhook connectors have the following configuration properties: Name:: The name of the connector. 
The name is used to identify a connector in the management UI connector listing, or in the connector list when configuring an action. -URL:: The request URL. If you are using the <> setting, make sure the hostname is whitelisted. +URL:: The request URL. If you are using the <> setting, make sure the hostname is added to the allowed hosts. Method:: HTTP request method, either `post`(default) or `put`. Headers:: A set of key-value pairs sent as headers with the request User:: An optional username. If set, HTTP basic authentication is used. Currently only basic authentication is supported. diff --git a/docs/user/canvas.asciidoc b/docs/user/canvas.asciidoc index 317ec67dd7c0a..0b0eb7a318495 100644 --- a/docs/user/canvas.asciidoc +++ b/docs/user/canvas.asciidoc @@ -137,7 +137,7 @@ image::images/canvas-map-embed.gif[] . To use the customization options, click the panel menu, then select one of the following options: -* *Edit map* — Opens <> or <> so that you can edit the original saved object. +* *Edit map* — Opens <> or a visualization builder so that you can edit the original saved object. * *Edit panel title* — Adds a title to the saved object. diff --git a/docs/user/dashboard.asciidoc b/docs/user/dashboard.asciidoc deleted file mode 100644 index b812af7e981bf..0000000000000 --- a/docs/user/dashboard.asciidoc +++ /dev/null @@ -1,191 +0,0 @@ -[[dashboard]] -= Dashboard - -[partintro] --- - -A _dashboard_ is a collection of visualizations, searches, and -maps, typically in real-time. Dashboards provide -at-a-glance insights into your data and enable you to drill down into details. - -With *Dashboard*, you can: - -* Add visualizations, saved searches, and maps for side-by-side analysis - -* Arrange dashboard elements to display exactly how you want - -* Customize time ranges to display only the data you want - -* Inspect and edit dashboard elements to find out exactly what kind of data is displayed - -[role="screenshot"] -image:images/Dashboard_example.png[Example dashboard] - -[float] -[[dashboard-read-only-access]] -=== [xpack]#Read only access# -If you see -the read-only icon in the application header, -then you don't have sufficient privileges to create and save dashboards. The buttons to create and edit -dashboards are not visible. For more information, see <>. - -[role="screenshot"] -image::images/dashboard-read-only-badge.png[Example of Dashboard read only access indicator in Kibana header] - --- - -[[dashboard-create-new-dashboard]] -== Create a dashboard - -To create a dashboard, you must have data indexed into {es}, an index pattern -to retrieve the data from {es}, and -visualizations, saved searches, or maps. If these don't exist, you're prompted to -add them as you create the dashboard, or you can add -<>, -which include pre-built dashboards. - -To begin, open the menu, go to *Dashboard*, then click *Create dashboard.* - -[float] -[[dashboard-add-elements]] -=== Add elements - -The visualizations, saved searches, and maps are stored as elements in panels -that you can move and resize. - -You can add elements from multiple indices, and the same element can appear in -multiple dashboards. - -To create an element: - -. Click *Create new*. -. On the *New Visualization* window, click the visualization type. -+ -[role="screenshot"] -image:images/Dashboard_add_new_visualization.png[Example add new visualization to dashboard] -+ -For information on how to create visualizations, see <>. -+ -For information on how to create maps, see <>. - -To add an existing element: - -. Click *Add*. - -. 
On the *Add panels* flyout, select the panel. -+ -When a dashboard element has a stored query, -both queries are applied. -+ -[role="screenshot"] -image:images/Dashboard_add_visualization.png[Example add visualization to dashboard] - -[float] -[[customizing-your-dashboard]] -=== Arrange dashboard elements - -In *Edit* mode, you can move, resize, customize, and delete panels to suit your needs. - -[[moving-containers]] -* To move a panel, click and hold the panel header and drag to the new location. - -[[resizing-containers]] -* To resize a panel, click the resize control and drag -to the new dimensions. - -* To toggle the use of margins and panel titles, use the *Options* menu. - -* To delete a panel, open the panel menu and select *Delete from dashboard.* Deleting a panel from a -dashboard does *not* delete the saved visualization or search. - -[float] -[[cloning-a-panel]] -=== Clone dashboard elements - -In *Edit* mode, you can clone any panel on a dashboard. - -To clone an existing panel, open the panel menu of the element you wish to clone, then select *Clone panel*. - -* Cloned panels appear beside the original, and will move other panels down to make room if necessary. - -* Clones support all of the original panel's functionality, including renaming, editing, and cloning. - -* All cloned visualizations will appear in the visualization list. - -[role="screenshot"] -image:images/clone_panel.gif[clone panel] - - -[float] -[[viewing-detailed-information]] -=== Inspect and edit elements - -Many dashboard elements allow you to drill down into the data and requests -behind the element. - -From the panel menu, select *Inspect*. -The data that displays depends on the element that you inspect. - -[role="screenshot"] -image:images/Dashboard_inspect.png[Inspect in dashboard] - -To open an element for editing, put the dashboard in *Edit* mode, -and then select *Edit visualization* from the panel menu. The changes you make appear in -every dashboard that uses the element. - -[float] -[[dashboard-customize-filter]] -=== Customize time ranges - -You can configure each visualization, saved search, and map on your dashboard -for a specific time range. For example, you might want one visualization to show -the monthly trend for CPU usage and another to show the current CPU usage. - -From the panel menu, select *Customize time range* to expose a time filter -dedicated to that panel. Panels that are not restricted by a specific -time range are controlled by the -global time filter. - -[role="screenshot"] -image:images/time_range_per_panel.gif[Time range per dashboard panel] - -[float] -[[save-dashboards]] -=== Save the dashboard - -When you're finished adding and arranging the panels, save the dashboard. - -. In the {kib} toolbar, click *Save*. - -. Enter the dashboard *Title* and optional *Description*, then *Save* the dashboard. - -include::{kib-repo-dir}/drilldowns/drilldowns.asciidoc[] -include::{kib-repo-dir}/drilldowns/explore-underlying-data.asciidoc[] - -[[sharing-dashboards]] -== Share the dashboard - -[[embedding-dashboards]] -Share your dashboard outside of {kib}. - -From the *Share* menu, you can: - -* Embed the code in a web page. Users must have {kib} access -to view an embedded dashboard. -* Share a direct link to a {kib} dashboard -* Generate a PDF report -* Generate a PNG report - -TIP: To create a link to a dashboard by title, use: + -`${domain}/${basepath?}/app/dashboards#/list?title=${yourdashboardtitle}` - -TIP: When sharing a link to a dashboard snapshot, use the *Short URL*. 
Snapshot -URLs are long and can be problematic for Internet Explorer and other -tools. To create a short URL, you must have write access to {kib}. - -[float] -[[import-dashboards]] -=== Export the dashboard - -To export the dashboard, open the menu, then click *Stack Management > Saved Objects*. For more information, -refer to <>. diff --git a/docs/user/dashboard/aggregation-reference.asciidoc b/docs/user/dashboard/aggregation-reference.asciidoc new file mode 100644 index 0000000000000..1bcea3bb36aea --- /dev/null +++ b/docs/user/dashboard/aggregation-reference.asciidoc @@ -0,0 +1,242 @@ +[[aggregation-reference]] +== Aggregation reference + +{kib} supports many types of {ref}/search-aggregations.html[{es} aggregations] that you can use to build complex summaries of your data. + +By using a series of {es} aggregations to extract and process your data, you can create panels that tell a +story about the trends, patterns, and outliers in your data. + +[float] +[[bucket-aggregations]] +=== Bucket aggregations + +For information about Elasticsearch bucket aggregations, refer to {ref}/search-aggregations-bucket.html[Bucket aggregations]. + +[options="header"] +|=== + +| Type | Visualizations | Data table | Markdown | Lens | TSVB + +| Histogram +^| X +^| X +^| X +| +| + +| Date histogram +^| X +^| X +^| X +^| X +^| X + +| Date range +^| X +^| X +^| X +| +| + +| Filter +^| X +^| X +^| X +| +^| X + +| Filters +^| X +^| X +^| X +| +^| X + +| GeoHash grid +^| X +^| X +^| X +| +| + +| IP range +^| X +^| X +^| X +| +| + +| Range +^| X +^| X +^| X +| +| + +| Terms +^| X +^| X +^| X +^| X +^| X + +| Significant terms +^| X +^| X +^| X +| +^| X + +|=== + +[float] +[[metrics-aggregations]] +=== Metrics aggregations + +For information about Elasticsearch metrics aggregations, refer to {ref}/search-aggregations-metrics.html[Metrics aggregations]. + +[options="header"] +|=== + +| Type | Visualizations | Data table | Markdown | Lens | TSVB + +| Average +^| X +^| X +^| X +^| X +^| X + +| Sum +^| X +^| X +^| X +^| X +^| X + +| Unique count (Cardinality) +^| X +^| X +^| X +^| X +^| X + +| Max +^| X +^| X +^| X +^| X +^| X + +| Min +^| X +^| X +^| X +^| X +^| X + +| Percentiles +^| X +^| X +^| X +| +^| X + +| Percentiles Rank +^| X +^| X +^| X +| +^| X + +| Top hit +^| X +^| X +^| X +| +^| X + +| Value count +| +| +| +| +^| X + +|=== + +[float] +[[pipeline-aggregations]] +=== Pipeline aggregations + +For information about Elasticsearch pipeline aggregations, refer to {ref}/search-aggregations-pipeline.html[Pipeline aggregations]. + +[options="header"] +|=== + +| Type | Visualizations | Data table | Markdown | Lens | TSVB + +| Avg bucket +^| X +^| X +^| X +| +^| X + +| Derivative +^| X +^| X +^| X +| +^| X + +| Max bucket +^| X +^| X +^| X +| +^| X + +| Min bucket +^| X +^| X +^| X +| +^| X + +| Sum bucket +^| X +^| X +^| X +^| +^| X + +| Moving average +^| X +^| X +^| X +^| +^| X + +| Cumulative sum +^| X +^| X +^| X +^| +^| X + +| Bucket script +| +| +| +| +^| X + +| Serial differencing +^| X +^| X +^| X +| +^| X + +|=== diff --git a/docs/user/dashboard/dashboard-drilldown.asciidoc b/docs/user/dashboard/dashboard-drilldown.asciidoc new file mode 100644 index 0000000000000..84701cae2ecc6 --- /dev/null +++ b/docs/user/dashboard/dashboard-drilldown.asciidoc @@ -0,0 +1,76 @@ +[[dashboard-drilldown]] +=== Dashboard drilldown + +The dashboard drilldown allows you to navigate from one dashboard to another dashboard. +For example, you might have a dashboard that shows the overall status of multiple data centers. 
+You can create a drilldown that navigates from this dashboard to a dashboard +that shows a single data center or server. + +This example shows a dashboard panel that contains a pie chart with a configured dashboard drilldown: + +[role="screenshot"] +image::images/drilldown_on_piechart.gif[Drilldown on pie chart that navigates to another dashboard] + +[float] +[[drilldowns-example]] +==== Try it: Create a dashboard drilldown + +Create the *Host Overview* drilldown shown above. + +*Set up the dashboards* + +. Add the <> data set. + +. Create a new dashboard, called `Host Overview`, and include these visualizations +from the sample data set: ++ +[%hardbreaks] +*[Logs] Heatmap* +*[Logs] Visitors by OS* +*[Logs] Host, Visits, and Bytes Table* +*[Logs] Total Requests and Bytes* ++ +TIP: If you don’t see data for a panel, try changing the time range. + +. Open the *[Logs] Web traffic* dashboard. + +. Set a search and filter. ++ +[%hardbreaks] +Search: `extension.keyword:( “gz” or “css” or “deb”)` +Filter: `geo.src : CN` + + +*Create the drilldown* + + +. In the dashboard menu bar, click *Edit*. + +. In *[Logs] Visitors by OS*, open the panel menu, and then select *Create drilldown*. + +. Pick *Go to dashboard* action. + +. Give the drilldown a name. + +. Select *Host Overview* as the destination dashboard. + +. Keep both filters enabled so that the drilldown carries over the global filters and date range. ++ +Your input should look similar to this: ++ +[role="screenshot"] +image::images/drilldown_create.png[Create drilldown with entries for drilldown name and destination] + +. Click *Create drilldown.* + +. Save the dashboard. ++ +If you don’t save the drilldown, and then navigate away, the drilldown is lost. + +. In *[Logs] Visitors by OS*, click the `win 8` slice of the pie, and then select the name of your drilldown. ++ +[role="screenshot"] +image::images/drilldown_on_panel.png[Drilldown on pie chart that navigates to another dashboard] ++ +You are navigated to your destination dashboard. Verify that the search query, filters, +and time range are carried over. diff --git a/docs/user/dashboard/dashboard.asciidoc b/docs/user/dashboard/dashboard.asciidoc new file mode 100644 index 0000000000000..c8bff91be91a6 --- /dev/null +++ b/docs/user/dashboard/dashboard.asciidoc @@ -0,0 +1,506 @@ +[[dashboard]] += Dashboard + +[partintro] +-- + +A _dashboard_ is a collection of panels that you use to analyze your data. On a dashboard, you can add a variety of panels that +you can rearrange and tell a story about your data. Panels contain everything you need, including visualizations, +interactive controls, markdown, and more. + +With *Dashboard*s, you can: + +* Add multiple panels to see many aspects and views of your data in one place. + +* Arrange panels for analysis and comparison. + +* Add text and images to provide context to the panels and make them easy to consume. + +* Create and apply filters to focus on the data you want to display. + +* Control who can use your data, and share the dashboard with a small or large audience. + +* Generate reports based on your findings. + +To begin, open the menu, go to *Dashboard*, then click *Create dashboard*. + +[role="screenshot"] +image:images/Dashboard_example.png[Example dashboard] + +[float] +[[dashboard-read-only-access]] +=== [xpack]#Read only access# +If you see +the read-only icon in the application header, +then you don't have sufficient privileges to create and save dashboards. The buttons to create and edit +dashboards are not visible. 
For more information, see <>.
+
+[role="screenshot"]
+image::images/dashboard-read-only-badge.png[Example of Dashboard read only access indicator in Kibana header]
+
+[float]
+[[types-of-panels]]
+== Types of panels
+
+Panels contain everything you need to tell a story about your data, including visualizations,
+interactive controls, Markdown, and more.
+
+[cols="50, 50"]
+|===
+
+a| *Area*
+
+Displays data points, connected by a line, where the area between the line and axes is shaded.
+Use area charts to compare two or more categories over time, and display the magnitude of trends.
+
+| image:images/area.png[Area chart]
+
+a| *Stacked area*
+
+Displays the evolution of the value of several data groups. The values of each group are displayed
+on top of each other. Use stacked area charts to visualize part-to-whole relationships, and to show
+how each category contributes to the cumulative total.
+
+| image:images/stacked_area.png[Stacked area chart]
+
+a| *Bar*
+
+Displays bars side-by-side where each bar represents a category. Use bar charts to compare data across a
+large number of categories, display data that includes categories with negative values, and easily identify
+the categories that represent the highest and lowest values. Kibana also supports horizontal bar charts.
+
+| image:images/bar.png[Bar chart]
+
+a| *Stacked bar*
+
+Displays numeric values across two or more categories. Use stacked bar charts to compare numeric values between
+levels of a categorical value. Kibana also supports stacked horizontal bar charts.
+
+| image:images/stacked_bar.png[Stacked bar chart]
+
+
+a| *Line*
+
+Displays data points that are connected by a line. Use line charts to visualize a sequence of values, discover
+trends over time, and forecast future values.
+
+| image:images/line.png[Line chart]
+
+a| *Pie*
+
+Displays slices that represent a data category, where the slice size is proportional to the quantity it represents.
+Use pie charts to show comparisons between multiple categories, illustrate the dominance of one category over others,
+and show percentage or proportional data.
+
+| image:images/pie.png[Pie chart]
+
+a| *Donut*
+
+Similar to the pie chart, but the central circle is removed. Use donut charts when you’d like to display multiple statistics at once.
+
+| image:images/donut.png[Donut chart]
+
+
+a| *Tree map*
+
+Relates different segments of your data to the whole. Each rectangle is subdivided into smaller rectangles, or sub branches, based on
+its proportion to the whole. Use treemaps to make efficient use of space to show percent total for each category.
+
+| image:images/treemap.png[Tree map]
+
+
+a| *Heat map*
+
+Displays graphical representations of data where the individual values are represented by colors. Use heat maps when your data set includes
+categorical data. For example, use a heat map to see the flights of origin countries compared to destination countries using the sample flight data.
+
+| image:images/heat_map.png[Heat map]
+
+a| *Goal*
+
+Displays how your metric progresses toward a fixed goal. Use the goal to display an easy to read visual of the status of your goal progression.
+
+| image:images/goal.png[Goal]
+
+
+a| *Gauge*
+
+Displays your data along a scale that changes color according to where your data falls on the expected scale. Use the gauge to show how metric
+values relate to reference threshold values, or determine how a specified field is performing versus how it is expected to perform.
+ +| image:images/gauge.png[Gauge] + + +a| *Metric* + +Displays a single numeric value for an aggregation. Use the metric visualization when you have a numeric value that is powerful enough to tell +a story about your data. + +| image:images/metric.png[Metric] + + +a| *Data table* + +Displays your raw data or aggregation results in a tabular format. Use data tables to display server configuration details, track counts, min, +or max values for a specific field, and monitor the status of key services. + +| image:images/data_table.png[Data table] + + +a| *Tag cloud* + +Graphical representations of how frequently a word appears in the source text. Use tag clouds to easily produce a summary of large documents and +create visual art for a specific topic. + +| image:images/tag_cloud.png[Tag cloud] + + +a| *Maps* + +For all your mapping needs, use <>. + +| image:images/maps.png[Maps] + + +|=== + +[float] +[[create-panels]] +== Create panels + +To create a panel, make sure you have {ref}/getting-started-index.html[data indexed into {es}] and an <> +to retrieve the data from {es}. If you aren’t ready to use your own data, {kib} comes with several pre-built dashboards that you can test out. For more information, +refer to <>. + +To begin, click *Create new*, then choose one of the following options on the +*New Visualization* window: + +* Click on the type of panel you want to create, then configure the options. + +* Select an editor to help you create the panel. + +[role="screenshot"] +image:images/Dashboard_add_new_visualization.png[Example add new visualization to dashboard] + +{kib} provides you with several editors that help you create panels. + +[float] +[[lens]] +=== Create panels with Lens + +*Lens* is the simplest and fastest way to create powerful visualizations of your data. To use *Lens*, you drag and drop as many data fields +as you want onto the visualization builder pane, and *Lens* uses heuristics to decide how to apply each field to the visualization. + +With *Lens*, you can: + +* Use the automatically generated suggestions to change the visualization type. +* Create visualizations with multiple layers and indices. +* Change the aggregation and labels to customize the data. + +[role="screenshot"] +image::images/lens_drag_drop.gif[Drag and drop] + +TIP: Drag-and-drop capabilities are available only when *Lens* knows how to use the data. If *Lens* is unable to automatically generate a +visualization, configure the customization options for your visualization. + +[float] +[[fiter-the-data-fields]] +==== Filter the data fields + +The data fields that are displayed are based on the selected <> and the <>. + +To view the data fields in a different index pattern, click the index pattern, then select a new one. The data fields automatically update. + +To filter the data fields: + +* Enter the name in the *Search field names*. +* Click *Field by type*, then select the filter. To show all fields in the index pattern, deselect *Only show fields with data*. + +[float] +[[view-data-summaries]] +==== View data summaries + +To help you decide exactly the data you want to display, get a quick summary of each field. The summary shows the distribution of +values within the specified time range. + +To view the data field summary information, navigate to the field, then click *i*. 
+ +[role="screenshot"] +image::images/lens_data_info.png[Data summary window] + +[float] +[[change-the-visualization-type]] +==== Change the visualization type + +Use the automatically generated suggestions to change the visualization type, or manually select the type of visualization you want to view. + +*Suggestions* are shortcuts to alternative visualizations that *Lens* generates for you. + +[role="screenshot"] +image::images/lens_suggestions.gif[Visualization suggestions] + +If you’d like to use a visualization type outside of the suggestions, click the visualization type, then select a new one. + +[role="screenshot"] +image::images/lens_viz_types.png[] + +When there is an exclamation point (!) next to a visualization type, *Lens* is unable to transfer your data, but still allows you to make the change. + +[float] +[[customize-the-data]] +==== Customize the data + +For each visualization type, you can customize the aggregation and labels. The options available depend on the selected visualization type. + +. Click a data field name in the editor, or click *Drop a field here*. +. Change the options that appear. ++ +[role="screenshot"] +image::images/lens_aggregation_labels.png[Quick function options] + +[float] +[[add-layers-and-indices]] +==== Add layers and indices + +To compare and analyze data from different sources, you can visualize multiple data layers and indices. Multiple layers and indices are +supported in area, line, and bar charts. + +To add a layer, click *+*, then drag and drop the data fields for the new layer. + +[role="screenshot"] +image::images/lens_layers.png[Add layers] + +To view a different index, click the index name in the editor, then select a new one. + +[role="screenshot"] +image::images/lens_index_pattern.png[Add index pattern] + +Ready to try out *Lens*? Refer to the <>. + +[float] +[[tsvb]] +=== Create panels with TSVB + +*TSVB* is a time series data visualizer that allows you to use the full power of the Elasticsearch aggregation framework. To use *TSVB*, +you can combine an infinite number of <> to display your data. + +With *TSVB*, you can: + +* Create visualizations, data tables, and markdown panels. +* Create visualizations with multiple indices. +* Change the aggregation and labels to customize the data. ++ +[role="screenshot"] +image::images/tsvb.png[TSVB UI] + +[float] +[[configure-the-data]] +==== Configure the data + +With *TSVB*, you can add and display multiple data sets to compare and analyze. {kib} uses many types of <> that you can use to build +complex summaries of that data. + +. Select *Data*. If you are using *Table*, select *Columns*. +. From the *Aggregation* drop down, select the aggregation you want to visualize. ++ +If you don’t see any data, change the <>. ++ +To add multiple aggregations, click *+*. +. From the *Group by* drop down, select how you want to group or split the data. +. To add another data set, click *+*. ++ +When you have more than one aggregation, the last value is displayed, which is indicated by the eye icon. + +[float] +[[change-the-data-display]] +==== Change the data display + +To find the best way to display your data, *TSVB* supports several types of panels and charts. + +To change the *Time Series* chart type: + +. Click *Data > Options*. +. Select the *Chart type*. 
+ +To change the panel type, click on the panel options: + +[role="screenshot"] +image::images/tsvb_change_display.gif[TSVB change the panel type] + +[float] +[[custommize-the-data]] +==== Customize the data + +View data in a different <>, and change the data label name and colors. The options available depend on the panel type. + +To change the index pattern, click *Panel options*, then enter the new *Index Pattern*. + +To override the index pattern for a data set, click *Data > Options*. Select *Yes* to override, then enter the new *Index pattern*. + +To change the data labels and colors: + +. Click *Data*. +. Enter the *Label* name, which *TSVB* uses on the legends and data labels. +. Click the color picker, then select the color for the data. ++ +[role="screenshot"] +image::images/tsvb_color_picker.png[TSVB color picker] + +[float] +[[add-annotations]] +==== Add annotations + +You can overlay annotation events on top of your *Time Series* charts. The options available depend on the data source. + +To begin, click *Annotations*, click *Add data source*, then configure the options. + +[role="screenshot"] +image::images/tsvb_annotations.png[TSVB annotations] + +[float] +[[filter-the-panel]] +==== Filter the panel + +The data that displays on the panel is based on the <> and <>. +You can filter the data on the panels using the <>. + +Click *Panel options*, then enter the syntax in the *Panel Filter* field. + +If you want to ignore filters from all of {kib}, select *Yes* for *Ignore global filter*. + +[float] +[[vega]] +=== Create custom panels with Vega + +Build custom visualizations using *Vega* and *Vega-Lite*, backed by one or more data sources including {es}, Elastic Map Service, +URL, or static data. Use the {kib} extensions to embed *Vega* in your dashboard, and add interactive tools. + +Use *Vega* and *Vega-Lite* when you want to create a visualization for: + +* Aggregations that use `nested` or `parent/child` mapping +* Aggregations without an index pattern +* Queries that use custom time filters +* Complex calculations +* Extracting data from _source instead of aggregations +* Scatter charts, sankey charts, and custom maps +* Using an unsupported visual theme + +[role="screenshot"] +image::images/vega.png[Vega UI] + +*Vega* and *Vega-Lite* are declarative formats that: + +* Create complex visualizations +* Use JSON and a different syntax for declaring visualizations +* Are not fully interchangeable + +For more information about *Vega* and *Vega-Lite*, refer to: + +* <> +* <> +* <> +* <> + +[float] +[[timelion]] +=== Create panels with Timelion + +*Timelion* is a time series data visualizer that enables you to combine independent data sources within a single visualization. + +*Timelion* is driven by a simple expression language that you use to: + +* Retrieve time series data +* Perform calculation to tease out the answers to complex questions +* Visualize the results + +[role="screenshot"] +image::images/timelion.png[Timelion UI] + +Ready to try out Timelion? For step-by-step tutorials, refer to: + +* <> +* <> +* <> + +[float] +[[timelion-deprecation]] +==== Timelion app deprecation + +Deprecated since 7.0, the Timelion app will be removed in 8.0. If you have any Timelion worksheets, you must migrate them to a dashboard. + +NOTE: Only the Timelion app is deprecated. {kib} continues to support Timelion +visualizations on dashboards and in Visualize and Canvas. + +To migrate a Timelion worksheet to a dashboard: + +. Open the menu, click **Dashboard**, then click **Create dashboard**. 
+ +. On the dashboard, click **Create New**, then select the Timelion visualization. + +. On a new tab, open the Timelion app, select the chart you want to copy, and copy its expression. ++ +[role="screenshot"] +image::images/timelion-copy-expression.png[] + +. Return to the other tab and paste the copied expression to the *Timelion Expression* field and click **Update**. ++ +[role="screenshot"] +image::images/timelion-vis-paste-expression.png[] + +. Save the new visualization, give it a name, and click **Save and Return**. ++ +Your Timelion visualization will appear on the dashboard. Repeat this for all your charts on each worksheet. ++ +[role="screenshot"] +image::images/timelion-dashboard.png[] + +[float] +[[save-panels]] +== Save panels + +When you’ve finished making changes, save the panels. + +. Click *Save*. +. Add the *Title* and optional *Description*. +. Click *Save and return*. + +[float] +[[add-existing-panels]] +== Add existing panels + +Add panels that you’ve already created to your dashboard. + +On the dashboard, click *Add an existing*, then select the panel you want to add. + +When a panel contains a stored query, both queries are applied. + +[role="screenshot"] +image:images/Dashboard_add_visualization.png[Example add visualization to dashboard] + +To make changes to the panel, put the dashboard in *Edit* mode, then select the edit option from the panel menu. +The changes you make appear in every dashboard that uses the panel, except if you edit the panel title. Changes to the panel title appear only on the dashboard where you made the change. + +[float] +[[save-dashboards]] +== Save dashboards + +When you’ve finished adding the panels, save the dashboard. + +. In the toolbar, click *Save*. + +. Enter the dashboard *Title* and optional *Description*, then *Save* the dashboard. + +-- +include::edit-dashboards.asciidoc[] + +include::explore-dashboard-data.asciidoc[] + +include::drilldowns.asciidoc[] + +include::share-dashboards.asciidoc[] + +include::tutorials.asciidoc[] + +include::aggregation-reference.asciidoc[] + +include::vega-reference.asciidoc[] diff --git a/docs/user/dashboard/drilldowns.asciidoc b/docs/user/dashboard/drilldowns.asciidoc new file mode 100644 index 0000000000000..85230f1b6f70d --- /dev/null +++ b/docs/user/dashboard/drilldowns.asciidoc @@ -0,0 +1,51 @@ +[[drilldowns]] +== Use drilldowns for dashboard actions + +Drilldowns, also known as custom actions, allow you to configure a +workflow for analyzing and troubleshooting your data. +For example, using a drilldown, you can navigate from one dashboard to another, +taking the current time range, filters, and other parameters with you, +so the context remains the same. You can continue your analysis from a new perspective. + +[role="screenshot"] +image::images/drilldown_on_piechart.gif[Drilldown on pie chart that navigates to another dashboard] + +Drilldowns are specific to the dashboard panel for which you create them—they are not shared across panels. A panel can have multiple drilldowns. + +[float] +[[actions]] +=== Drilldown actions + +Drilldowns are user-configurable {kib} actions that are stored with the dashboard metadata. +Kibana provides the following types of actions: + +[cols="2"] +|=== + +a| <> + +| Navigate to a dashboard. + +a| <> + +| Navigate to external or internal URL. + +|=== + +[NOTE] +============================================== +Some action types are paid commercial features, while others are free. 
+For a comparison of the Elastic subscription levels, +see https://www.elastic.co/subscriptions[the subscription page]. +============================================== + +[float] +[[code-drilldowns]] +=== Code drilldowns +Third-party developers can create drilldowns. +Refer to {kib-repo}blob/{branch}/x-pack/examples/ui_actions_enhanced_examples[this example plugin] +to learn how to code drilldowns. + +include::dashboard-drilldown.asciidoc[] +include::url-drilldown.asciidoc[] + diff --git a/docs/user/dashboard/edit-dashboards.asciidoc b/docs/user/dashboard/edit-dashboards.asciidoc new file mode 100644 index 0000000000000..7534ea1e9e9fb --- /dev/null +++ b/docs/user/dashboard/edit-dashboards.asciidoc @@ -0,0 +1,115 @@ +[[edit-dashboards]] +== Edit dashboards + +Now that you have added panels to your dashboard, you can add filter panels to interact with the data, and Markdown panels to add context to the dashboard. +To make your dashboard look the way you want, use the editing options. + +[float] +[[add-controls]] +=== Add controls + +To filter the data on your dashboard in real-time, add a *Controls* panel. + +You can add two types of *Controls*: + +* Options list — Filters content based on one or more specified options. The dropdown menu is dynamically populated with the results of a terms aggregation. +For example, use the options list on the sample flight dashboard when you want to filter the data by origin city and destination city. + +* Range slider — Filters data within a specified range of numbers. The minimum and maximum values are dynamically populated with the results of a +min and max aggregation. For example, use the range slider when you want to filter the sample flight dashboard by a specific average ticket price. + +[role="screenshot"] +image::images/dashboard-controls.png[] + +To configure *Controls* for your dashboard: + +. Click *Options*, then configure the following: + +* *Update Kibana filters on each change* — When selected, all interactive inputs create filters that refresh the dashboard. When unselected, + {kib} filters are created only when you click *Apply changes*. + +* *Use time filter* — When selected, the aggregations that generate the options list and time range are connected to the <>. + +* *Pin filters to global state* — When selected, all filters created by interacting with the inputs are automatically pinned. + +. Click *Update*. + +[float] +[[add-markdown]] +=== Add Markdown + +*Markdown* is a text entry field that accepts GitHub-flavored Markdown text. When you enter the text, the tool populates the results on the dashboard. + +Use Markdown when you want to add context to the other panels on your dashboard, such as important information, instructions and images. + +For information about GitHub-flavored Markdown text, click *Help*. + +For example, when you enter: + +[role="screenshot"] +image::images/markdown_example_1.png[] + +The following instructions are displayed: + +[role="screenshot"] +image::images/markdown_example_2.png[] + +Or when you enter: + +[role="screenshot"] +image::images/markdown_example_3.png[] + +The following image is displayed: + +[role="screenshot"] +image::images/markdown_example_4.png[] + +[float] +[[arrange-panels]] +[[moving-containers]] +[[resizing-containers]] +=== Arrange panels + +To make your dashboard panels look exactly how you want, you can move, resize, customize, and delete them. 
+
+Put the dashboard in *Edit* mode, then use the following options:
+
+* To move, click and hold the panel header, then drag to the new location.
+
+* To resize, click the resize control, then drag to the new dimensions.
+
+* To delete, open the panel menu, then select *Delete from dashboard*. When you delete a panel from the dashboard, the
+visualization or saved search from the panel is still available in Kibana.
+
+[float]
+[[clone-panels]]
+=== Clone panels
+
+To duplicate a panel and its configured functionality, clone the panel. Cloned panels support all of the original functionality,
+including renaming, editing, and cloning.
+
+. Put the dashboard in *Edit* mode.
+
+. For the panel you want to clone, open the panel menu, then select *Clone panel*.
+
+Cloned panels appear beside the original, and move other panels down to make room when necessary.
+All cloned visualization panels appear in the visualization list.
+
+[role="screenshot"]
+image:images/clone_panel.gif[clone panel]
+
+[float]
+[[dashboard-customize-filter]]
+=== Customize time ranges
+
+You can configure each visualization, saved search, and map on your dashboard
+for a specific time range. For example, you might want one visualization to show
+the monthly trend for CPU usage and another to show the current CPU usage.
+
+From the panel menu, select *Customize time range* to expose a time filter
+dedicated to that panel. Panels that are not restricted by a specific
+time range are controlled by the
+<>.
+
+[role="screenshot"]
+image:images/time_range_per_panel.gif[Time range per dashboard panel]
diff --git a/docs/user/dashboard/explore-dashboard-data.asciidoc b/docs/user/dashboard/explore-dashboard-data.asciidoc new file mode 100644 index 0000000000000..238dfb79e900b --- /dev/null +++ b/docs/user/dashboard/explore-dashboard-data.asciidoc @@ -0,0 +1,18 @@
+[[explore-dashboard-data]]
+== Explore dashboard data
+
+Get a closer look at your data by inspecting elements and using drilldown actions.
+
+[float]
+[[viewing-detailed-information]]
+=== Inspect elements
+
+To view the data and requests behind the visualizations and saved searches, you can drill down into the elements.
+
+From the panel menu, select *Inspect*.
+The data that displays depends on the element that you inspect.
+
+[role="screenshot"]
+image:images/Dashboard_inspect.png[Inspect in dashboard]
+
+include::explore-underlying-data.asciidoc[]
diff --git a/docs/user/dashboard/explore-underlying-data.asciidoc b/docs/user/dashboard/explore-underlying-data.asciidoc new file mode 100644 index 0000000000000..9b7be21dc45d2 --- /dev/null +++ b/docs/user/dashboard/explore-underlying-data.asciidoc @@ -0,0 +1,27 @@
+[float]
+[[explore-the-underlying-data]]
+=== Explore the underlying data for panels
+
+To explore the underlying data of the panels on your dashboard, {kib} opens *Discover*,
+where you can view and filter the data in the visualization panel. When {kib} opens *Discover*, the index pattern, filters, query, and time range for the visualization continue to apply.
+
+TIP: The *Explore underlying data* option is available only for visualization panels with a single index pattern.
+
+To use the *Explore underlying data* option:
+
+* Open the panel menu, then click *Explore underlying data*.
++
+[role="screenshot"]
+image::images/explore_data_context_menu.png[Explore underlying data from panel context menu]
+
+* Interact with the chart, then click *Explore underlying data* on the menu that appears.
++ +[role="screenshot"] +image::images/explore_data_in_chart.png[Explore underlying data from chart] ++ +To enable, open `kibana.yml`, then add the following: + +["source","yml"] +----------- +xpack.discoverEnhanced.actions.exploreDataInChart.enabled: true +----------- diff --git a/docs/user/dashboard/images/area.png b/docs/user/dashboard/images/area.png new file mode 100644 index 0000000000000..85d21a9e178c5 Binary files /dev/null and b/docs/user/dashboard/images/area.png differ diff --git a/docs/user/dashboard/images/bar.png b/docs/user/dashboard/images/bar.png new file mode 100644 index 0000000000000..f1db847655947 Binary files /dev/null and b/docs/user/dashboard/images/bar.png differ diff --git a/docs/user/dashboard/images/data_table.png b/docs/user/dashboard/images/data_table.png new file mode 100644 index 0000000000000..3e08ec526ba57 Binary files /dev/null and b/docs/user/dashboard/images/data_table.png differ diff --git a/docs/user/dashboard/images/donut.png b/docs/user/dashboard/images/donut.png new file mode 100644 index 0000000000000..a662f58ba553b Binary files /dev/null and b/docs/user/dashboard/images/donut.png differ diff --git a/docs/drilldowns/images/drilldown_create.png b/docs/user/dashboard/images/drilldown_create.png similarity index 100% rename from docs/drilldowns/images/drilldown_create.png rename to docs/user/dashboard/images/drilldown_create.png diff --git a/docs/drilldowns/images/drilldown_menu.png b/docs/user/dashboard/images/drilldown_menu.png similarity index 100% rename from docs/drilldowns/images/drilldown_menu.png rename to docs/user/dashboard/images/drilldown_menu.png diff --git a/docs/drilldowns/images/drilldown_on_panel.png b/docs/user/dashboard/images/drilldown_on_panel.png similarity index 100% rename from docs/drilldowns/images/drilldown_on_panel.png rename to docs/user/dashboard/images/drilldown_on_panel.png diff --git a/docs/drilldowns/images/drilldown_on_piechart.gif b/docs/user/dashboard/images/drilldown_on_piechart.gif similarity index 100% rename from docs/drilldowns/images/drilldown_on_piechart.gif rename to docs/user/dashboard/images/drilldown_on_piechart.gif diff --git a/docs/user/dashboard/images/drilldown_pick_an_action.png b/docs/user/dashboard/images/drilldown_pick_an_action.png new file mode 100644 index 0000000000000..c99e931e3fbe1 Binary files /dev/null and b/docs/user/dashboard/images/drilldown_pick_an_action.png differ diff --git a/docs/drilldowns/images/explore_data_context_menu.png b/docs/user/dashboard/images/explore_data_context_menu.png similarity index 100% rename from docs/drilldowns/images/explore_data_context_menu.png rename to docs/user/dashboard/images/explore_data_context_menu.png diff --git a/docs/drilldowns/images/explore_data_in_chart.png b/docs/user/dashboard/images/explore_data_in_chart.png similarity index 100% rename from docs/drilldowns/images/explore_data_in_chart.png rename to docs/user/dashboard/images/explore_data_in_chart.png diff --git a/docs/user/dashboard/images/gauge.png b/docs/user/dashboard/images/gauge.png new file mode 100644 index 0000000000000..c4aef7f5f6854 Binary files /dev/null and b/docs/user/dashboard/images/gauge.png differ diff --git a/docs/user/dashboard/images/goal.png b/docs/user/dashboard/images/goal.png new file mode 100644 index 0000000000000..967e64f722d74 Binary files /dev/null and b/docs/user/dashboard/images/goal.png differ diff --git a/docs/user/dashboard/images/heat_map.png b/docs/user/dashboard/images/heat_map.png new file mode 100644 index 0000000000000..d4a6502509f6f Binary files 
/dev/null and b/docs/user/dashboard/images/heat_map.png differ diff --git a/docs/user/dashboard/images/lens_aggregation_labels.png b/docs/user/dashboard/images/lens_aggregation_labels.png new file mode 100644 index 0000000000000..9dcf1d226a197 Binary files /dev/null and b/docs/user/dashboard/images/lens_aggregation_labels.png differ diff --git a/docs/user/dashboard/images/lens_data_info.png b/docs/user/dashboard/images/lens_data_info.png new file mode 100644 index 0000000000000..5ea6fc64a217d Binary files /dev/null and b/docs/user/dashboard/images/lens_data_info.png differ diff --git a/docs/user/dashboard/images/lens_drag_drop.gif b/docs/user/dashboard/images/lens_drag_drop.gif new file mode 100644 index 0000000000000..ca62115e7ea3a Binary files /dev/null and b/docs/user/dashboard/images/lens_drag_drop.gif differ diff --git a/docs/user/dashboard/images/lens_index_pattern.png b/docs/user/dashboard/images/lens_index_pattern.png new file mode 100644 index 0000000000000..90a34b7a5d225 Binary files /dev/null and b/docs/user/dashboard/images/lens_index_pattern.png differ diff --git a/docs/user/dashboard/images/lens_layers.png b/docs/user/dashboard/images/lens_layers.png new file mode 100644 index 0000000000000..7410425a6977e Binary files /dev/null and b/docs/user/dashboard/images/lens_layers.png differ diff --git a/docs/user/dashboard/images/lens_suggestions.gif b/docs/user/dashboard/images/lens_suggestions.gif new file mode 100644 index 0000000000000..3258e924cb205 Binary files /dev/null and b/docs/user/dashboard/images/lens_suggestions.gif differ diff --git a/docs/user/dashboard/images/lens_viz_types.png b/docs/user/dashboard/images/lens_viz_types.png new file mode 100644 index 0000000000000..2ecfa6bd0e0e3 Binary files /dev/null and b/docs/user/dashboard/images/lens_viz_types.png differ diff --git a/docs/user/dashboard/images/line.png b/docs/user/dashboard/images/line.png new file mode 100644 index 0000000000000..123fa74dc7e14 Binary files /dev/null and b/docs/user/dashboard/images/line.png differ diff --git a/docs/user/dashboard/images/maps.png b/docs/user/dashboard/images/maps.png new file mode 100644 index 0000000000000..65336451cc1c7 Binary files /dev/null and b/docs/user/dashboard/images/maps.png differ diff --git a/docs/user/dashboard/images/metric.png b/docs/user/dashboard/images/metric.png new file mode 100644 index 0000000000000..f8182d538a608 Binary files /dev/null and b/docs/user/dashboard/images/metric.png differ diff --git a/docs/user/dashboard/images/pie.png b/docs/user/dashboard/images/pie.png new file mode 100644 index 0000000000000..927fbb98adc07 Binary files /dev/null and b/docs/user/dashboard/images/pie.png differ diff --git a/docs/user/dashboard/images/stacked_area.png b/docs/user/dashboard/images/stacked_area.png new file mode 100644 index 0000000000000..ae66fc51176f9 Binary files /dev/null and b/docs/user/dashboard/images/stacked_area.png differ diff --git a/docs/user/dashboard/images/stacked_bar.png b/docs/user/dashboard/images/stacked_bar.png new file mode 100644 index 0000000000000..aa90ce3685cff Binary files /dev/null and b/docs/user/dashboard/images/stacked_bar.png differ diff --git a/docs/user/dashboard/images/tag_cloud.png b/docs/user/dashboard/images/tag_cloud.png new file mode 100644 index 0000000000000..976c456e4a1f1 Binary files /dev/null and b/docs/user/dashboard/images/tag_cloud.png differ diff --git a/docs/user/dashboard/images/timelion.png b/docs/user/dashboard/images/timelion.png new file mode 100644 index 0000000000000..a663791575077 Binary files 
/dev/null and b/docs/user/dashboard/images/timelion.png differ diff --git a/docs/user/dashboard/images/treemap.png b/docs/user/dashboard/images/treemap.png new file mode 100644 index 0000000000000..5df3c9526bfeb Binary files /dev/null and b/docs/user/dashboard/images/treemap.png differ diff --git a/docs/user/dashboard/images/tsvb.png b/docs/user/dashboard/images/tsvb.png new file mode 100644 index 0000000000000..09a3c7e86eb56 Binary files /dev/null and b/docs/user/dashboard/images/tsvb.png differ diff --git a/docs/user/dashboard/images/tsvb_annotations.png b/docs/user/dashboard/images/tsvb_annotations.png new file mode 100644 index 0000000000000..510f3c2672118 Binary files /dev/null and b/docs/user/dashboard/images/tsvb_annotations.png differ diff --git a/docs/user/dashboard/images/tsvb_change_display.gif b/docs/user/dashboard/images/tsvb_change_display.gif new file mode 100644 index 0000000000000..09d435b0a6b24 Binary files /dev/null and b/docs/user/dashboard/images/tsvb_change_display.gif differ diff --git a/docs/user/dashboard/images/tsvb_color_picker.png b/docs/user/dashboard/images/tsvb_color_picker.png new file mode 100644 index 0000000000000..4f033579d0005 Binary files /dev/null and b/docs/user/dashboard/images/tsvb_color_picker.png differ diff --git a/docs/user/dashboard/images/url_drilldown_github.png b/docs/user/dashboard/images/url_drilldown_github.png new file mode 100644 index 0000000000000..d2eaec311948e Binary files /dev/null and b/docs/user/dashboard/images/url_drilldown_github.png differ diff --git a/docs/user/dashboard/images/url_drilldown_go_to_github.gif b/docs/user/dashboard/images/url_drilldown_go_to_github.gif new file mode 100644 index 0000000000000..7cca3f72d5a68 Binary files /dev/null and b/docs/user/dashboard/images/url_drilldown_go_to_github.gif differ diff --git a/docs/user/dashboard/images/url_drilldown_pick_an_action.png b/docs/user/dashboard/images/url_drilldown_pick_an_action.png new file mode 100644 index 0000000000000..c99e931e3fbe1 Binary files /dev/null and b/docs/user/dashboard/images/url_drilldown_pick_an_action.png differ diff --git a/docs/user/dashboard/images/url_drilldown_popup.png b/docs/user/dashboard/images/url_drilldown_popup.png new file mode 100644 index 0000000000000..392edd16ea328 Binary files /dev/null and b/docs/user/dashboard/images/url_drilldown_popup.png differ diff --git a/docs/user/dashboard/images/url_drilldown_trigger_picker.png b/docs/user/dashboard/images/url_drilldown_trigger_picker.png new file mode 100644 index 0000000000000..2fe930f35dce8 Binary files /dev/null and b/docs/user/dashboard/images/url_drilldown_trigger_picker.png differ diff --git a/docs/user/dashboard/images/url_drilldown_url_template.png b/docs/user/dashboard/images/url_drilldown_url_template.png new file mode 100644 index 0000000000000..d8515afe66a80 Binary files /dev/null and b/docs/user/dashboard/images/url_drilldown_url_template.png differ diff --git a/docs/user/dashboard/images/vega.png b/docs/user/dashboard/images/vega.png new file mode 100644 index 0000000000000..6a0d8cb772adf Binary files /dev/null and b/docs/user/dashboard/images/vega.png differ diff --git a/docs/user/dashboard/share-dashboards.asciidoc b/docs/user/dashboard/share-dashboards.asciidoc new file mode 100644 index 0000000000000..cfa146d60fdac --- /dev/null +++ b/docs/user/dashboard/share-dashboards.asciidoc @@ -0,0 +1,27 @@ +[[share-dashboards]] +== Share dashboards + +[[embedding-dashboards]] +Share your dashboard outside of {kib}. 
+ +From the *Share* menu, you can: + +* Embed the code in a web page. Users must have {kib} access +to view an embedded dashboard. +* Share a direct link to a {kib} dashboard +* Generate a PDF report +* Generate a PNG report + +TIP: To create a link to a dashboard by title, use: + +`${domain}/${basepath?}/app/dashboards#/list?title=${yourdashboardtitle}` + +TIP: When sharing a link to a dashboard snapshot, use the *Short URL*. Snapshot +URLs are long and can be problematic for Internet Explorer and other +tools. To create a short URL, you must have write access to {kib}. + +[float] +[[import-dashboards]] +=== Export the dashboard + +To export the dashboard, open the menu, then click *Stack Management > Saved Objects*. For more information, +refer to <>. \ No newline at end of file diff --git a/docs/user/dashboard/tutorials.asciidoc b/docs/user/dashboard/tutorials.asciidoc new file mode 100644 index 0000000000000..931720ccbe257 --- /dev/null +++ b/docs/user/dashboard/tutorials.asciidoc @@ -0,0 +1,1756 @@ +[[tutorials]] +== Tutorials + +Learn how to use *Lens*, *Vega*, and *Timelion* by going through one of the step-by-step tutorials. + +[[lens-tutorial]] +=== Compare sales over time with Lens + +Ready to create your own visualization with Lens? Use the following tutorial to create a visualization that lets you compare sales over time. + +[float] +[[lens-before-begin]] +==== Before you begin + +To start, you'll need to add the <>. + +[float] +==== Build the visualization + +Drag and drop your data onto the visualization builder pane. + +. Select the *kibana_sample_data_ecommerce* index pattern. + +. Click image:images/time-filter-calendar.png[], then click *Last 7 days*. ++ +The fields in the data panel update. + +. Drag and drop the *taxful_total_price* data field to the visualization builder pane. ++ +[role="screenshot"] +image::images/lens_tutorial_1.png[Lens tutorial] + +To display the average order prices over time, *Lens* automatically added in *order_date* field. + +To break down your data, drag the *category.keyword* field to the visualization builder pane. Lens +knows that you want to show the top categories and compare them across the dates, +and creates a chart that compares the sales for each of the top three categories: + +[role="screenshot"] +image::images/lens_tutorial_2.png[Lens tutorial] + +[float] +[[customize-lens-visualization]] +==== Customize your visualization + +Make your visualization look exactly how you want with the customization options. + +. Click *Average of taxful_total_price*, then change the *Label* to `Sales`. ++ +[role="screenshot"] +image::images/lens_tutorial_3.1.png[Lens tutorial] + +. Click *Top values of category.keyword*, then change *Number of values* to `10`. ++ +[role="screenshot"] +image::images/lens_tutorial_3.2.png[Lens tutorial] ++ +The visualization updates to show there are only six available categories. ++ +Look at the *Suggestions*. An area chart is not an option, but for the sales data, a stacked area chart might be the best option. + +. To switch the chart type, click *Stacked bar chart* in the column, then click *Stacked area* from the *Select a visualizations* window. ++ +[role="screenshot"] +image::images/lens_tutorial_3.png[Lens tutorial] + +[float] +[[lens-tutorial-next-steps]] +==== Next steps + +Now that you've created your visualization, you can add it to a <> or <>. 
+ +[[vega-lite-tutorial-create-your-first-visualizations]] +=== Create your first visualization with Vega-Lite + +experimental[] + +In this tutorial, you will learn about how to edit Vega-Lite in {kib} to create +a stacked area chart from an {es} search query. It will give you a starting point +for a more comprehensive +https://vega.github.io/vega-lite/tutorials/getting_started.html[introduction to Vega-Lite], +while only covering the basics. + +In this tutorial, you will build a stacked area chart from one of the {kib} sample data +sets. + +[role="screenshot"] +image::visualize/images/vega_lite_tutorial_1.png[] + +Before beginning this tutorial, install the <> +set. + +When you first open the Vega editor in {kib}, you will see a pre-populated +line chart which shows the total number of documents across all your indices +within the time range. + +[role="screenshot"] +image::visualize/images/vega_lite_default.png[] + +The text editor contains a Vega-Lite spec written in https://hjson.github.io/[HJSON], +which is similar to JSON but optimized for human editing. HJSON supports: + +* Comments using // or /* syntax +* Object keys without quotes +* String values without quotes +* Optional commas +* Double or single quotes +* Multiline strings + +[float] +[[small-steps]] +==== Small steps + +Always work on Vega in the smallest steps possible, and save your work frequently. +Small changes will cause unexpected results. Click the "Save" button now. + +The first step is to change the index to one of the <> +sets. Change + +```yaml +index: _all +``` + +to: + +```yaml +index: kibana_sample_data_ecommerce +``` + +Click "Update". The result is probably not what you expect. You should see a flat +line with 0 results. + +You've only changed the index, so the difference must be the query is returning +no results. You can try the <>, +but intuition may be faster for this particular problem. + +In this case, the problem is that you are querying the field `@timestamp`, +which does not exist in the `kibana_sample_data_ecommerce` data. Find and replace +`@timestamp` with `order_date`. This fixes the problem, leaving you with this spec: + +.Expand Vega-Lite spec +[%collapsible%closed] +==== +[source,yaml] +---- +{ + $schema: https://vega.github.io/schema/vega-lite/v4.json + title: Event counts from ecommerce + data: { + url: { + %context%: true + %timefield%: order_date + index: kibana_sample_data_ecommerce + body: { + aggs: { + time_buckets: { + date_histogram: { + field: order_date + interval: {%autointerval%: true} + extended_bounds: { + min: {%timefilter%: "min"} + max: {%timefilter%: "max"} + } + min_doc_count: 0 + } + } + } + size: 0 + } + } + format: {property: "aggregations.time_buckets.buckets" } + } + + mark: line + + encoding: { + x: { + field: key + type: temporal + axis: { title: null } + } + y: { + field: doc_count + type: quantitative + axis: { title: "Document count" } + } + } +} +---- + +==== + +Now, let's make the visualization more interesting by adding another aggregation +to create a stacked area chart. To verify that you have constructed the right +query, it is easiest to use the {kib} Dev Tools in a separate tab from the +Vega editor. Open the Dev Tools from the Management section of the navigation. + +This query is roughly equivalent to the one that is used in the default +Vega-Lite spec. 
Copy it into the Dev Tools: + +```js +POST kibana_sample_data_ecommerce/_search +{ + "query": { + "range": { + "order_date": { + "gte": "now-7d" + } + } + }, + "aggs": { + "time_buckets": { + "date_histogram": { + "field": "order_date", + "fixed_interval": "1d", + "extended_bounds": { + "min": "now-7d" + }, + "min_doc_count": 0 + } + } + }, + "size": 0 +} +``` + +There's not enough data to create a stacked bar in the original query, so we +will add a new +{ref}/search-aggregations-bucket-terms-aggregation.html[terms aggregation]: + +```js +POST kibana_sample_data_ecommerce/_search +{ + "query": { + "range": { + "order_date": { + "gte": "now-7d" + } + } + }, + "aggs": { + "categories": { + "terms": { "field": "category.keyword" }, + "aggs": { + "time_buckets": { + "date_histogram": { + "field": "order_date", + "fixed_interval": "1d", + "extended_bounds": { + "min": "now-7d" + }, + "min_doc_count": 0 + } + } + } + } + }, + "size": 0 +} +``` + +You'll see that the response format looks different from the previous query: + +```json +{ + "aggregations" : { + "categories" : { + "doc_count_error_upper_bound" : 0, + "sum_other_doc_count" : 0, + "buckets" : [{ + "key" : "Men's Clothing", + "doc_count" : 1661, + "time_buckets" : { + "buckets" : [{ + "key_as_string" : "2020-06-30T00:00:00.000Z", + "key" : 1593475200000, + "doc_count" : 19 + }, { + "key_as_string" : "2020-07-01T00:00:00.000Z", + "key" : 1593561600000, + "doc_count" : 71 + }] + } + }] + } + } +} +``` + +Now that we have data that we're happy with, it's time to convert from an +isolated {es} query into a query with {kib} integration. Looking at the +<>, you will +see the full list of special tokens that are used in this query, such +as `%context: true`. This query has also replaced `"fixed_interval": "1d"` +with `interval: {%autointerval%: true}`. Copy the final query into +your spec: + +```yaml + data: { + url: { + %context%: true + %timefield%: order_date + index: kibana_sample_data_ecommerce + body: { + aggs: { + categories: { + terms: { field: "category.keyword" } + aggs: { + time_buckets: { + date_histogram: { + field: order_date + interval: {%autointerval%: true} + extended_bounds: { + min: {%timefilter%: "min"} + max: {%timefilter%: "max"} + } + min_doc_count: 0 + } + } + } + } + } + size: 0 + } + } + format: {property: "aggregations.categories.buckets" } + } +``` + +If you copy and paste that into your Vega-Lite spec, and click "Update", +you will see a warning saying `Infinite extent for field "key": [Infinity, -Infinity]`. +Let's use our <> to understand why. + +Vega-Lite generates data using the names `source_0` and `data_0`. `source_0` contains +the results from the {es} query, and `data_0` contains the visually encoded results +which are shown in the chart. To debug this problem, you need to compare both. + +To look at the source, open the browser dev tools console and type +`VEGA_DEBUG.view.data('source_0')`. You will see: + +```js +[{ + doc_count: 454 + key: "Men's Clothing" + time_buckets: {buckets: Array(57)} + Symbol(vega_id): 12822 +}, ...] +``` + +To compare to the visually encoded data, open the browser dev tools console and type +`VEGA_DEBUG.view.data('data_0')`. You will see: + +```js +[{ + doc_count: 454 + key: NaN + time_buckets: {buckets: Array(57)} + Symbol(vega_id): 13879 +}] +``` + +The issue seems to be that the `key` property is not being converted the right way, +which makes sense because the `key` is now `Men's Clothing` instead of a timestamp. 
+ +To fix this, try updating the `encoding` of your Vega-Lite spec to: + +```yaml + encoding: { + x: { + field: time_buckets.buckets.key + type: temporal + axis: { title: null } + } + y: { + field: time_buckets.buckets.doc_count + type: quantitative + axis: { title: "Document count" } + } + } +``` + +This will show more errors, and you can inspect `VEGA_DEBUG.view.data('data_0')` to +understand why. This now shows: + +```js +[{ + doc_count: 454 + key: "Men's Clothing" + time_buckets: {buckets: Array(57)} + time_buckets.buckets.doc_count: undefined + time_buckets.buckets.key: null + Symbol(vega_id): 14094 +}] +``` + +It looks like the problem is that the `time_buckets` inner array is not being +extracted by Vega. The solution is to use a Vega-lite +https://vega.github.io/vega-lite/docs/flatten.html[flatten transformation], available in {kib} 7.9 and later. +If using an older version of Kibana, the flatten transformation is available in Vega +but not Vega-Lite. + +Add this section in between the `data` and `encoding` section: + +```yaml + transform: [{ + flatten: ["time_buckets.buckets"] + }] +``` + +This does not yet produce the results you expect. Inspect the transformed data +by typing `VEGA_DEBUG.view.data('data_0')` into the console again: + +```js +[{ + doc_count: 453 + key: "Men's Clothing" + time_bucket.buckets.doc_count: undefined + time_buckets: {buckets: Array(57)} + time_buckets.buckets: { + key_as_string: "2020-06-30T15:00:00.000Z", + key: 1593529200000, + doc_count: 2 + } + time_buckets.buckets.key: null + Symbol(vega_id): 21564 +}] +``` + +The debug view shows `undefined` values where you would expect to see numbers, and +the cause is that there are duplicate names which are confusing Vega-Lite. This can +be fixed by making this change to the `transform` and `encoding` blocks: + +```yaml + transform: [{ + flatten: ["time_buckets.buckets"], + as: ["buckets"] + }] + + mark: area + + encoding: { + x: { + field: buckets.key + type: temporal + axis: { title: null } + } + y: { + field: buckets.doc_count + type: quantitative + axis: { title: "Document count" } + } + color: { + field: key + type: nominal + } + } +``` + +At this point, you have a stacked area chart that shows the top categories, +but the chart is still missing some common features that we expect from a {kib} +visualization. Let's add hover states and tooltips next. + +Hover states are handled differently in Vega-Lite and Vega. In Vega-Lite this is +done using a concept called `selection`, which has many permutations that are not +covered in this tutorial. We will be adding a simple tooltip and hover state. + +Because {kib} has enabled the https://vega.github.io/vega-lite/docs/tooltip.html[Vega tooltip plugin], +tooltips can be defined in several ways: + +* Automatic tooltip based on the data, via `{ content: "data" }` +* Array of fields, like `[{ field: "key", type: "nominal" }]` +* Defining a custom Javascript object using the `calculate` transform + +For the simple tooltip, add this to your encoding: + +```yaml + encoding: { + tooltip: [{ + field: buckets.key + type: temporal + title: "Date" + }, { + field: key + type: nominal + title: "Category" + }, { + field: buckets.doc_count + type: quantitative + title: "Count" + }] + } +``` + +As you hover over the area series in your chart, a multi-line tooltip will +appear, but it won't indicate the nearest point that it's pointing to. To +indicate the nearest point, we need to add a second layer. + +The first step is to remove the `mark: area` from your visualization. 
+Once you've removed the previous mark, add a composite mark at the end of +the Vega-Lite spec: + +```yaml + layer: [{ + mark: area + }, { + mark: point + }] +``` + +You'll see that the points are not appearing to line up with the area chart, +and the reason is that the points are not being stacked. Change your Y encoding +to this: + +```yaml + y: { + field: buckets.doc_count + type: quantitative + axis: { title: "Document count" } + stack: true + } +``` + +Now, we will add a `selection` block inside the point mark: + +```yaml + layer: [{ + mark: area + }, { + mark: point + + selection: { + pointhover: { + type: single + on: mouseover + clear: mouseout + empty: none + fields: ["buckets.key", "key"] + nearest: true + } + } + + encoding: { + size: { + condition: { + selection: pointhover + value: 100 + } + value: 5 + } + fill: { + condition: { + selection: pointhover + value: white + } + } + } + }] +``` + +Now that you've enabled a selection, try moving the mouse around the visualization +and seeing the points respond to the nearest position: + +[role="screenshot"] +image::visualize/images/vega_lite_tutorial_2.png[] + +The final result of this tutorial is this spec: + +.Expand final Vega-Lite spec +[%collapsible%closed] +==== +[source,yaml] +---- +{ + $schema: https://vega.github.io/schema/vega-lite/v4.json + title: Event counts from ecommerce + data: { + url: { + %context%: true + %timefield%: order_date + index: kibana_sample_data_ecommerce + body: { + aggs: { + categories: { + terms: { field: "category.keyword" } + aggs: { + time_buckets: { + date_histogram: { + field: order_date + interval: {%autointerval%: true} + extended_bounds: { + min: {%timefilter%: "min"} + max: {%timefilter%: "max"} + } + min_doc_count: 0 + } + } + } + } + } + size: 0 + } + } + format: {property: "aggregations.categories.buckets" } + } + + transform: [{ + flatten: ["time_buckets.buckets"] + as: ["buckets"] + }] + + encoding: { + x: { + field: buckets.key + type: temporal + axis: { title: null } + } + y: { + field: buckets.doc_count + type: quantitative + axis: { title: "Document count" } + stack: true + } + color: { + field: key + type: nominal + title: "Category" + } + tooltip: [{ + field: buckets.key + type: temporal + title: "Date" + }, { + field: key + type: nominal + title: "Category" + }, { + field: buckets.doc_count + type: quantitative + title: "Count" + }] + } + + layer: [{ + mark: area + }, { + mark: point + + selection: { + pointhover: { + type: single + on: mouseover + clear: mouseout + empty: none + fields: ["buckets.key", "key"] + nearest: true + } + } + + encoding: { + size: { + condition: { + selection: pointhover + value: 100 + } + value: 5 + } + fill: { + condition: { + selection: pointhover + value: white + } + } + } + }] +} +---- + +==== + +[[vega-tutorial-update-kibana-filters-from-vega]] +=== Update {kib} filters from Vega + +experimental[] + +In this tutorial you will build an area chart in Vega using an {es} search query, +and add a click handler and drag handler to update {kib} filters. +This tutorial is not a full https://vega.github.io/vega/tutorials/[Vega tutorial], +but will cover the basics of creating Vega visualizations into {kib}. 
+ +First, create an almost-blank Vega chart by pasting this into the editor: + +```yaml +{ + $schema: "https://vega.github.io/schema/vega/v5.json" + data: [{ + name: source_0 + }] + + scales: [{ + name: x + type: time + range: width + }, { + name: y + type: linear + range: height + }] + + axes: [{ + orient: bottom + scale: x + }, { + orient: left + scale: y + }] + + marks: [ + { + type: area + from: { + data: source_0 + } + encode: { + update: { + } + } + } + ] +} +``` + +Despite being almost blank, this Vega spec still contains the minimum requirements: + +* Data +* Scales +* Marks +* (optional) Axes + +Next, add a valid {es} search query in the `data` block: + +```yaml + data: [ + { + name: source_0 + url: { + %context%: true + %timefield%: order_date + index: kibana_sample_data_ecommerce + body: { + aggs: { + time_buckets: { + date_histogram: { + field: order_date + fixed_interval: "3h" + extended_bounds: { + min: {%timefilter%: "min"} + max: {%timefilter%: "max"} + } + min_doc_count: 0 + } + } + } + size: 0 + } + } + format: { property: "aggregations.time_buckets.buckets" } + } + ] +``` + +Click "Update", and nothing will change in the visualization. The first step +is to change the X and Y scales based on the data: + +```yaml + scales: [{ + name: x + type: time + range: width + domain: { + data: source_0 + field: key + } + }, { + name: y + type: linear + range: height + domain: { + data: source_0 + field: doc_count + } + }] +``` + +Click "Update", and you will see that the X and Y axes are now showing labels based +on the real data. + +Next, encode the fields `key` and `doc_count` as the X and Y values: + +```yaml + marks: [ + { + type: area + from: { + data: source_0 + } + encode: { + update: { + x: { + scale: x + field: key + } + y: { + scale: y + value: 0 + } + y2: { + scale: y + field: doc_count + } + } + } + } + ] +``` + +Click "Update" and you will get a basic area chart: + +[role="screenshot"] +image::visualize/images/vega_tutorial_3.png[] + +Next, add a new block to the `marks` section. This will show clickable points to filter for a specific +date: + +```yaml + { + name: point + type: symbol + style: ["point"] + from: { + data: source_0 + } + encode: { + update: { + x: { + scale: x + field: key + } + y: { + scale: y + field: doc_count + } + size: { + value: 100 + } + fill: { + value: black + } + } + } + } +``` + +Next, we will create a Vega signal to make the points clickable. You can access +the clicked `datum` in the expression used to update. In this case, you want +clicks on points to add a time filter with the 3-hour interval defined above. + +```yaml + signals: [ + { + name: point_click + on: [{ + events: { + source: scope + type: click + markname: point + } + update: '''kibanaSetTimeFilter(datum.key, datum.key + 3 * 60 * 60 * 1000)''' + }] + } + ] +``` + +This event is using the {kib} custom function `kibanaSetTimeFilter` to generate a filter that +gets applied to the entire dashboard on click. + +The mouse cursor does not currently indicate that the chart is interactive. Find the `marks` section, +and update the mark named `point` by adding `cursor: { value: "pointer" }` to +the `encoding` section like this: + +```yaml + { + name: point + type: symbol + style: ["point"] + from: { + data: source_0 + } + encode: { + update: { + ... + cursor: { value: "pointer" } + } + } + } +``` + +Next, we will add a drag interaction which will allow the user to narrow into +a specific time range in the visualization. 
This will require adding more signals, and +adding a rectangle overlay: + +[role="screenshot"] +image::visualize/images/vega_tutorial_4.png[] + +The first step is to add a new `signal` to track the X position of the cursor: + +```yaml + { + name: currentX + value: -1 + on: [{ + events: { + type: mousemove + source: view + }, + update: "clamp(x(), 0, width)" + }, { + events: { + type: mouseout + source: view + } + update: "-1" + }] + } +``` + +Now add a new `mark` to indicate the current cursor position: + +```yaml + { + type: rule + interactive: false + encode: { + update: { + y: {value: 0} + y2: {signal: "height"} + stroke: {value: "gray"} + strokeDash: { + value: [2, 1] + } + x: {signal: "max(currentX,0)"} + defined: {signal: "currentX > 0"} + } + } + } +``` + +Next, add a signal to track the current selected range, which will update +until the user releases the mouse button or uses the escape key: + + +```yaml + { + name: selected + value: [0, 0] + on: [{ + events: { + type: mousedown + source: view + } + update: "[clamp(x(), 0, width), clamp(x(), 0, width)]" + }, { + events: { + type: mousemove + source: window + consume: true + between: [{ + type: mousedown + source: view + }, { + merge: [{ + type: mouseup + source: window + }, { + type: keydown + source: window + filter: "event.key === 'Escape'" + }] + }] + } + update: "[selected[0], clamp(x(), 0, width)]" + }, { + events: { + type: keydown + source: window + filter: "event.key === 'Escape'" + } + update: "[0, 0]" + }] + } +``` + +Now that there is a signal which tracks the time range from the user, we need to indicate +the range visually by adding a new mark which only appears conditionally: + +```yaml + { + type: rect + name: selectedRect + encode: { + update: { + height: {signal: "height"} + fill: {value: "#333"} + fillOpacity: {value: 0.2} + x: {signal: "selected[0]"} + x2: {signal: "selected[1]"} + defined: {signal: "selected[0] !== selected[1]"} + } + } + } +``` + +Finally, add a new signal which will update the {kib} time filter when the mouse is released while +dragging: + +```yaml + { + name: applyTimeFilter + value: null + on: [{ + events: { + type: mouseup + source: view + } + update: '''selected[0] !== selected[1] ? kibanaSetTimeFilter( + invert('x',selected[0]), + invert('x',selected[1])) : null''' + }] + } +``` + +Putting this all together, your visualization now supports the main features of +standard visualizations in {kib}, but with the potential to add even more control. 
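As one example of that extra control, a click can add a dashboard filter instead of changing the time range by calling the `kibanaAddFilter` expression function. The following signal is only a sketch and is not part of the final spec below: it assumes the `kibana_sample_data_ecommerce` index pattern and pins the clicked 3-hour bucket as a range filter on `order_date`. If you try it, use it in place of the `point_click` signal so that two handlers do not respond to the same click.

```yaml
  {
    name: point_add_filter
    on: [{
      events: {
        source: scope
        type: click
        markname: point
      }
      // kibanaAddFilter(query, index) adds a filter pill to the dashboard.
      // datum.key is the clicked bucket start in epoch milliseconds, so the
      // range covers exactly one 3-hour bucket.
      update: '''kibanaAddFilter({range: {order_date: {gte: datum.key, lt: datum.key + 3 * 60 * 60 * 1000}}}, "kibana_sample_data_ecommerce")'''
    }]
  }
```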
+The final Vega spec for this tutorial is here: + +.Expand final Vega spec +[%collapsible%closed] +==== +[source,yaml] +---- +{ + $schema: "https://vega.github.io/schema/vega/v5.json" + data: [ + { + name: source_0 + url: { + %context%: true + %timefield%: order_date + index: kibana_sample_data_ecommerce + body: { + aggs: { + time_buckets: { + date_histogram: { + field: order_date + fixed_interval: "3h" + extended_bounds: { + min: {%timefilter%: "min"} + max: {%timefilter%: "max"} + } + min_doc_count: 0 + } + } + } + size: 0 + } + } + format: { property: "aggregations.time_buckets.buckets" } + } + ] + + scales: [{ + name: x + type: time + range: width + domain: { + data: source_0 + field: key + } + }, { + name: y + type: linear + range: height + domain: { + data: source_0 + field: doc_count + } + }] + + axes: [{ + orient: bottom + scale: x + }, { + orient: left + scale: y + }] + + marks: [ + { + type: area + from: { + data: source_0 + } + encode: { + update: { + x: { + scale: x + field: key + } + y: { + scale: y + value: 0 + } + y2: { + scale: y + field: doc_count + } + } + } + }, + { + name: point + type: symbol + style: ["point"] + from: { + data: source_0 + } + encode: { + update: { + x: { + scale: x + field: key + } + y: { + scale: y + field: doc_count + } + size: { + value: 100 + } + fill: { + value: black + } + cursor: { value: "pointer" } + } + } + }, + { + type: rule + interactive: false + encode: { + update: { + y: {value: 0} + y2: {signal: "height"} + stroke: {value: "gray"} + strokeDash: { + value: [2, 1] + } + x: {signal: "max(currentX,0)"} + defined: {signal: "currentX > 0"} + } + } + }, + { + type: rect + name: selectedRect + encode: { + update: { + height: {signal: "height"} + fill: {value: "#333"} + fillOpacity: {value: 0.2} + x: {signal: "selected[0]"} + x2: {signal: "selected[1]"} + defined: {signal: "selected[0] !== selected[1]"} + } + } + } + ] + + signals: [ + { + name: point_click + on: [{ + events: { + source: scope + type: click + markname: point + } + update: '''kibanaSetTimeFilter(datum.key, datum.key + 3 * 60 * 60 * 1000)''' + }] + } + { + name: currentX + value: -1 + on: [{ + events: { + type: mousemove + source: view + }, + update: "clamp(x(), 0, width)" + }, { + events: { + type: mouseout + source: view + } + update: "-1" + }] + } + { + name: selected + value: [0, 0] + on: [{ + events: { + type: mousedown + source: view + } + update: "[clamp(x(), 0, width), clamp(x(), 0, width)]" + }, { + events: { + type: mousemove + source: window + consume: true + between: [{ + type: mousedown + source: view + }, { + merge: [{ + type: mouseup + source: window + }, { + type: keydown + source: window + filter: "event.key === 'Escape'" + }] + }] + } + update: "[selected[0], clamp(x(), 0, width)]" + }, { + events: { + type: keydown + source: window + filter: "event.key === 'Escape'" + } + update: "[0, 0]" + }] + } + { + name: applyTimeFilter + value: null + on: [{ + events: { + type: mouseup + source: view + } + update: '''selected[0] !== selected[1] ? kibanaSetTimeFilter( + invert('x',selected[0]), + invert('x',selected[1])) : null''' + }] + } + ] +} + +---- +==== + +[[timelion-tutorial-create-time-series-visualizations]] +=== Create time series visualizations with Timelion + +To compare the real-time percentage of CPU time spent in user space to the results offset by one hour, create a time series visualization. 
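Every step in this tutorial edits a single Timelion expression: a data source function such as `.es()` followed by chained formatting functions, with multiple series separated by commas. As a rough preview of the shape you are working toward (the index pattern and field are the Metricbeat examples used throughout this tutorial; adjust them to your own data):

[source,text]
----------------------------------
.es(index=metricbeat-*,
    timefield='@timestamp',
    metric='avg:system.cpu.user.pct')
  .label('current hour'),
.es(offset=-1h,
    index=metricbeat-*,
    timefield='@timestamp',
    metric='avg:system.cpu.user.pct')
  .label('last hour')
----------------------------------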
+ +[float] +[[define-the-functions]] +==== Define the functions + +To start tracking the real-time percentage of CPU, enter the following in the *Timelion Expression* field: + +[source,text] +---------------------------------- +.es(index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct') +---------------------------------- + +[role="screenshot"] +image::images/timelion-create01.png[] +{nbsp} + +[float] +[[compare-the-data]] +==== Compare the data + +To compare the two data sets, add another series with data from the previous hour, separated by a comma: + +[source,text] +---------------------------------- +.es(index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct'), +.es(offset=-1h, <1> + index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct') +---------------------------------- + +<1> `offset` offsets the data retrieval by a date expression. In this example, `-1h` offsets the data back by one hour. + +[role="screenshot"] +image::images/timelion-create02.png[] +{nbsp} + +[float] +[[add-label-names]] +==== Add label names + +To easily distinguish between the two data sets, add the label names: + +[source,text] +---------------------------------- +.es(offset=-1h,index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct').label('last hour'), +.es(index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct').label('current hour') <1> +---------------------------------- + +<1> `.label()` adds custom labels to the visualization. + +[role="screenshot"] +image::images/timelion-create03.png[] +{nbsp} + +[float] +[[add-a-title]] +==== Add a title + +Add a meaningful title: + +[source,text] +---------------------------------- +.es(offset=-1h, + index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct') + .label('last hour'), +.es(index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct') + .label('current hour') + .title('CPU usage over time') <1> +---------------------------------- + +<1> `.title()` adds a title with a meaningful name. Titles make is easier for unfamiliar users to understand the purpose of the visualization. + +[role="screenshot"] +image::images/timelion-customize01.png[] +{nbsp} + +[float] +[[change-the-chart-type]] +==== Change the chart type + +To differentiate between the current hour data and the last hour data, change the chart type: + +[source,text] +---------------------------------- +.es(offset=-1h, + index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct') + .label('last hour') + .lines(fill=1,width=0.5), <1> +.es(index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct') + .label('current hour') + .title('CPU usage over time') +---------------------------------- + +<1> `.lines()` changes the appearance of the chart lines. In this example, `.lines(fill=1,width=0.5)` sets the fill level to `1`, and the border width to `0.5`. 
+ +[role="screenshot"] +image::images/timelion-customize02.png[] +{nbsp} + +[float] +[[change-the-line-colors]] +==== Change the line colors + +To make the current hour data stand out, change the line colors: + +[source,text] +---------------------------------- +.es(offset=-1h, + index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct') + .label('last hour') + .lines(fill=1,width=0.5) + .color(gray), <1> +.es(index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct') + .label('current hour') + .title('CPU usage over time') + .color(#1E90FF) +---------------------------------- + +<1> `.color()` changes the color of the data. Supported color types include standard color names, hexadecimal values, or a color schema for grouped data. In this example, `.color(gray)` represents the last hour, and `.color(#1E90FF)` represents the current hour. + +[role="screenshot"] +image::images/timelion-customize03.png[] +{nbsp} + +[float] +[[make-adjustments-to-the-legend]] +==== Make adjustments to the legend + +Change the position and style of the legend: + +[source,text] +---------------------------------- +.es(offset=-1h, + index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct') + .label('last hour') + .lines(fill=1,width=0.5) + .color(gray), +.es(index=metricbeat-*, + timefield='@timestamp', + metric='avg:system.cpu.user.pct') + .label('current hour') + .title('CPU usage over time') + .color(#1E90FF) + .legend(columns=2, position=nw) <1> +---------------------------------- + +<1> `.legend()` sets the position and style of the legend. In this example, `.legend(columns=2, position=nw)` places the legend in the north west position of the visualization with two columns. + +[role="screenshot"] +image::images/timelion-customize04.png[] +{nbsp} + +[[timelion-tutorial-create-visualizations-with-mathematical-functions]] +=== Timelion tutorial: Create visualizations with mathematical functions + +To create a visualization for inbound and outbound network traffic, use mathematical functions. + +[float] +[[mathematical-functions-define-functions]] +==== Define the functions + +To start tracking the inbound and outbound network traffic, enter the following in the *Timelion Expression* field: + +[source,text] +---------------------------------- +.es(index=metricbeat*, + timefield=@timestamp, + metric=max:system.network.in.bytes) +---------------------------------- + +[role="screenshot"] +image::images/timelion-math01.png[] +{nbsp} + +[float] +[[mathematical-functions-plot-change]] +==== Plot the rate of change + +Change how the data is displayed so that you can easily monitor the inbound traffic: + +[source,text] +---------------------------------- +.es(index=metricbeat*, + timefield=@timestamp, + metric=max:system.network.in.bytes) + .derivative() <1> +---------------------------------- + +<1> `.derivative` plots the change in values over time. + +[role="screenshot"] +image::images/timelion-math02.png[] +{nbsp} + +Add a similar calculation for outbound traffic: + +[source,text] +---------------------------------- +.es(index=metricbeat*, + timefield=@timestamp, + metric=max:system.network.in.bytes) + .derivative(), +.es(index=metricbeat*, + timefield=@timestamp, + metric=max:system.network.out.bytes) + .derivative() + .multiply(-1) <1> +---------------------------------- + +<1> `.multiply()` multiplies the data series by a number, the result of a data series, or a list of data series. 
For this example, `.multiply(-1)` converts the outbound network traffic to a negative value since the outbound network traffic is leaving your machine. + +[role="screenshot"] +image::images/timelion-math03.png[] +{nbsp} + +[float] +[[mathematical-functions-convert-data]] +==== Change the data metric + +To make the visualization easier to analyze, change the data metric from bytes to megabytes: + +[source,text] +---------------------------------- +.es(index=metricbeat*, + timefield=@timestamp, + metric=max:system.network.in.bytes) + .derivative() + .divide(1048576), +.es(index=metricbeat*, + timefield=@timestamp, + metric=max:system.network.out.bytes) + .derivative() + .multiply(-1) + .divide(1048576) <1> +---------------------------------- + +<1> `.divide()` accepts the same input as `.multiply()`, then divides the data series by the defined divisor. + +[role="screenshot"] +image::images/timelion-math04.png[] +{nbsp} + +[float] +[[mathematical-functions-add-labels]] +==== Customize and format the visualization + +Customize and format the visualization using functions: + +[source,text] +---------------------------------- +.es(index=metricbeat*, + timefield=@timestamp, + metric=max:system.network.in.bytes) + .derivative() + .divide(1048576) + .lines(fill=2, width=1) + .color(green) + .label("Inbound traffic") <1> + .title("Network traffic (MB/s)"), <2> +.es(index=metricbeat*, + timefield=@timestamp, + metric=max:system.network.out.bytes) + .derivative() + .multiply(-1) + .divide(1048576) + .lines(fill=2, width=1) <3> + .color(blue) <4> + .label("Outbound traffic") + .legend(columns=2, position=nw) <5> +---------------------------------- + +<1> `.label()` adds custom labels to the visualization. +<2> `.title()` adds a title with a meaningful name. +<3> `.lines()` changes the appearance of the chart lines. In this example, `.lines(fill=2, width=1)` sets the fill level to `2`, and the border width to `1`. +<4> `.color()` changes the color of the data. Supported color types include standard color names, hexadecimal values, or a color schema for grouped data. In this example, `.color(green)` represents the inbound network traffic, and `.color(blue)` represents the outbound network traffic. +<5> `.legend()` sets the position and style of the legend. For this example, `legend(columns=2, position=nw)` places the legend in the north west position of the visualization with two columns. + +[role="screenshot"] +image::images/timelion-math05.png[] +{nbsp} + +[[timelion-tutorial-create-visualizations-withconditional-logic-and-tracking-trends]] +=== Create visualizations with conditional logic and tracking trends using Timelion + +To easily detect outliers and discover patterns over time, modify time series data with conditional logic and create a trend with a moving average. 
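Conditional styling is built around the `.if()` function, which takes an operator, a value to compare each point against, a result to use when the comparison is true, and a result to use when it is false. A minimal sketch of that shape, using the same Metricbeat memory field as the rest of this tutorial (the threshold value is illustrative):

[source,text]
----------------------------------
.es(index=metricbeat-*,
    timefield='@timestamp',
    metric='max:system.memory.actual.used.bytes')
  .if(gt,
      11300000000,
      .es(index=metricbeat-*,
          timefield='@timestamp',
          metric='max:system.memory.actual.used.bytes'),
      null)
----------------------------------

Points above the threshold keep their value; all other points become `null` and are not drawn, which is how the warning and severe series in this section are highlighted.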
+ +With Timelion conditional logic, you can use the following operator values to compare your data: + +[horizontal] +`eq`:: equal +`ne`:: not equal +`lt`:: less than +`lte`:: less than or equal to +`gt`:: greater than +`gte`:: greater than or equal to + +[float] +[[conditional-define-functions]] +==== Define the functions + +To chart the maximum value of `system.memory.actual.used.bytes`, enter the following in the *Timelion Expression* field: + +[source,text] +---------------------------------- +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes') +---------------------------------- + +[role="screenshot"] +image::images/timelion-conditional01.png[] +{nbsp} + +[float] +[[conditional-track-memory]] +==== Track used memory + +To track the amount of memory used, create two thresholds: + +[source,text] +---------------------------------- +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes'), +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes') + .if(gt, <1> + 11300000000, <2> + .es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes'), + null) + .label('warning') + .color('#FFCC11'), +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes') + .if(gt, + 11375000000, + .es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes'), + null) + .label('severe') + .color('red') +---------------------------------- + +<1> Timelion conditional logic for the _greater than_ operator. In this example, the warning threshold is 11.3GB (`11300000000`), and the severe threshold is 11.375GB (`11375000000`). If the threshold values are too high or low for your machine, adjust the values accordingly. +<2> `if()` compares each point to a number. If the condition evaluates to `true`, adjust the styling. If the condition evaluates to `false`, use the default styling. + +[role="screenshot"] +image::images/timelion-conditional02.png[] +{nbsp} + +[float] +[[conditional-determine-trend]] +==== Determine the trend + +To determine the trend, create a new data series: + +[source,text] +---------------------------------- +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes'), +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes') + .if(gt,11300000000, + .es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes'), + null) + .label('warning') + .color('#FFCC11'), +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes') + .if(gt,11375000000, + .es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes'), + null). + label('severe') + .color('red'), +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes') + .mvavg(10) <1> +---------------------------------- + +<1> `mvavg()` calculates the moving average over a specified period of time. In this example, `.mvavg(10)` creates a moving average with a window of 10 data points. 
+ +[role="screenshot"] +image::images/timelion-conditional03.png[] +{nbsp} + +[float] +[[conditional-format-visualization]] +==== Customize and format the visualization + +Customize and format the visualization using functions: + +[source,text] +---------------------------------- +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes') + .label('max memory') <1> + .title('Memory consumption over time'), <2> +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes') + .if(gt, + 11300000000, + .es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes'), + null) + .label('warning') + .color('#FFCC11') <3> + .lines(width=5), <4> +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes') + .if(gt, + 11375000000, + .es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes'), + null) + .label('severe') + .color('red') + .lines(width=5), +.es(index=metricbeat-*, + timefield='@timestamp', + metric='max:system.memory.actual.used.bytes') + .mvavg(10) + .label('mvavg') + .lines(width=2) + .color(#5E5E5E) + .legend(columns=4, position=nw) <5> +---------------------------------- + +<1> `.label()` adds custom labels to the visualization. +<2> `.title()` adds a title with a meaningful name. +<3> `.color()` changes the color of the data. Supported color types include standard color names, hexadecimal values, or a color schema for grouped data. +<4> `.lines()` changes the appearance of the chart lines. In this example, .lines(width=5) sets border width to `5`. +<5> `.legend()` sets the position and style of the legend. For this example, `(columns=4, position=nw)` places the legend in the north west position of the visualization with four columns. + +[role="screenshot"] +image::images/timelion-conditional04.png[] +{nbsp} + +For additional information on Timelion conditional capabilities, go to https://www.elastic.co/blog/timeseries-if-then-else-with-timelion[I have but one .condition()]. \ No newline at end of file diff --git a/docs/user/dashboard/url-drilldown.asciidoc b/docs/user/dashboard/url-drilldown.asciidoc new file mode 100644 index 0000000000000..16f82477756b7 --- /dev/null +++ b/docs/user/dashboard/url-drilldown.asciidoc @@ -0,0 +1,221 @@ +[[url-drilldown]] +=== URL drilldown + +The URL drilldown allows you to navigate from a dashboard to an internal or external URL. +The destination URL can be dynamic, depending on the dashboard context or user’s interaction with a visualization. + +For example, you might have a dashboard that shows data from a Github repository. +You can create a drilldown that navigates from this dashboard to Github. + +[role="screenshot"] +image:images/url_drilldown_go_to_github.gif[Drilldown on pie chart that navigates to Github] + +NOTE: URL drilldown is available with the https://www.elastic.co/subscriptions[Gold subscription] and higher. + +[float] +[[try-it]] +==== Try it: Create a URL drilldown + +This example shows how to create the "Show on Github" drilldown shown above. + +. Add the <> data set. +. Open the *[Logs] Web traffic* dashboard. This isn’t data from Github, but it should work for demonstration purposes. +. In the dashboard menu bar, click *Edit*. +. In *[Logs] Visitors by OS*, open the panel menu, and then select *Create drilldown*. +. Give the drilldown a name: *Show on Github*. +. Select a drilldown action: *Go to URL*. 
++ +[role="screenshot"] +image:images/url_drilldown_pick_an_action.png[Action picker] +. Enter a URL template: ++ +[source, bash] +---- +https://github.com/elastic/kibana/issues?q=is:issue+is:open+{{event.value}} +---- ++ +This example URL navigates to {kib} issues on Github. `{{event.value}}` will be substituted with a value associated with a clicked pie slice. In _preview_ `{{event.value}}` is substituted with a <> value. +[role="screenshot"] +image:images/url_drilldown_url_template.png[URL template input] +. Click *Create drilldown*. +. Save the dashboard. ++ +If you don’t save the drilldown, and then navigate away, the drilldown is lost. + +. In *[Logs] Visitors by OS*, click any slice of the pie, and then select the drilldown *Show on Github*. ++ +[role="screenshot"] +image:images/url_drilldown_popup.png[URL drilldown popup] ++ +You are navigated to the issue list in the {kib} repository. Verify that value from a pie slice you’ve clicked on is carried over to Github. ++ +[role="screenshot"] +image:images/url_drilldown_github.png[Github] + +[float] +[[trigger-picker]] +==== Picking a trigger for a URL drilldown + +Some panels support multiple user interactions (called triggers) for which you can configure a URL drilldown. The list of supported variables in the URL template depends on the trigger you selected. +In the preceding example, you configured a URL drilldown on a pie chart. The only trigger that pie chart supports is clicking on a pie slice, so you didn’t have to pick a trigger. + +However, the sample *[Logs] Unique Visitors vs. Average Bytes* chart supports both clicking on a data point and selecting a range. When you create a URL drilldown for this chart, you have the following choices: + +[role="screenshot"] +image:images/url_drilldown_trigger_picker.png[Trigger picker: Single click and Range selection] + +Variables in the URL template differ per trigger. +For example, *Single click* has `{{event.value}}` and *Range selection* has `{{event.from}}` and `{{event.to}}`. +You can create multiple URL drilldowns per panel and attach them to different triggers. + +[float] +[[templating]] +==== URL templating language + +The URL template input uses Handlebars — a simple templating language. Handlebars templates look like regular text with embedded Handlebars expressions. + +[source, bash] +---- +https://github.com/elastic/kibana/issues?q={{event.value}} +---- + +A Handlebars expression is a `{{`, some contents, followed by a `}}`. When the drilldown is executed, these expressions are replaced by values from the dashboard and interaction context. + +Refer to Handlebars https://handlebarsjs.com/guide/expressions.html#expressions[documentation] to learn about advanced use cases. + +[[helpers]] +In addition to https://handlebarsjs.com/guide/builtin-helpers.html[built-in] Handlebars helpers, you can use the following custom helpers: + + +|=== +|Helper |Use case + +|json +a|Serialize variables in JSON format. + +Example: + +`{{json event}}` + +`{{json event.key event.value}}` + +`{{json filters=context.panel.filters}}` + + +|rison +a|Serialize variables in https://github.com/w33ble/rison-node[rison] format. Rison is a common format for {kib} apps for storing state in the URL. + +Example: + +`{{rison event}}` + +`{{rison event.key event.value}}` + +`{{rison filters=context.panel.filters}}` + + +|date +a|Format dates. Supports relative dates expressions (for example, "now-15d"). 
Refer to the https://momentjs.com/docs/#/displaying/format/[moment] docs for different formatting options. + +Example: + +`{{ date event.from “YYYY MM DD”}}` + +`{{date “now-15”}}` +|=== + + +[float] +[[variables]] +==== URL template variables + +The URL drilldown template has three sources for variables: + +* *Global* static variables that don’t change depending on the place where the URL drilldown is used or which user interaction executed the drilldown. For example: `{{kibanaUrl}}`. +* *Context* variables that change depending on where the drilldown is created and used. These variables are extracted from a context of a panel on a dashboard. For example, `{{context.panel.filters}}` gives access to filters that applied to the current panel. +* *Event* variables that depend on the trigger context. These variables are dynamically extracted from the interaction context when the drilldown is executed. + +[[values-in-preview]] +A subtle but important difference between *context* and *event* variables is that *context* variables use real values in previews when creating a URL drilldown. +For example, `{{context.panel.filters}}` are previewed with the current filters that applied to a panel. +*Event* variables are extracted during drilldown execution from a user interaction with a panel (for example, from a pie slice that the user clicked on). + +Because there is no user interaction with a panel in preview, there is no interaction context to use in a preview. +To work around this, {kib} provides a sample interaction that relies on a picked <>. +So in a preview, you might notice that `{{event.value}}` is replaced with `{{event.value}}` instead of with a sample from your data. +Such previews can help you make sure that the structure of your URL template is valid. +However, to ensure that the configured URL drilldown works as expected with your data, you have to save the dashboard and test in the panel. + +You can access the full list of variables available for the current panel and selected trigger by clicking *Add variable* in the top-right corner of a URL template input. + +[float] +[[variables-reference]] +==== Variables reference + + +|=== +|Source |Variable |Description + +|*Global* +| kibanaUrl +| {kib} base URL. Useful for creating URL drilldowns that navigate within {kib}. + +| *Context* +| context.panel +| Context provided by current dashboard panel. + +| +| context.panel.id +| ID of a panel. + +| +| context.panel.title +| Title of a panel. + +| +| context.panel.filters +| List of {kib} filters applied to a panel. + +Tip: Use in combination with <> helper for +internal {kib} navigations with carrying over current filters. + +| +| context.panel.query.query +| Current query string. + +| +| context.panel.query.lang +| Current query language. + +| +| context.panel.timeRange.from + +context.panel.timeRange.to +| Current time picker values. + +Tip: Use in combination with <> helper to format date. + +| +| context.panel.timeRange.indexPatternId + +context.panel.timeRange.indexPatternIds +|Index pattern ids used by a panel. + +| +| context.panel.savedObjectId +| ID of saved object behind a panel. + +| *Single click* +| event.value +| Value behind clicked data point. + +| +| event.key +| Field name behind clicked data point + +| +| event.negate +| Boolean, indicating whether clicked data point resulted in negative filter. + +| *Range selection* +| event.from + +event.to +| `from` and `to` values of selected range. Depending on your data, could be either a date or number. 
+ +Tip: Consider using <> helper for date formatting. + +| +| event.key +| Aggregation field behind the selected range, if available. + +|=== diff --git a/docs/user/dashboard/vega-reference.asciidoc b/docs/user/dashboard/vega-reference.asciidoc new file mode 100644 index 0000000000000..eed8d9a35b874 --- /dev/null +++ b/docs/user/dashboard/vega-reference.asciidoc @@ -0,0 +1,437 @@ +[[vega-reference]] +== Vega reference + +experimental[] + +For additional *Vega* and *Vega-Lite* information, refer to the reference sections. + +[float] +[[reference-for-kibana-extensions]] +=== Reference for {kib} extensions + +{kib} has extended Vega and Vega-Lite with extensions that support: + +* Default height and width +* Default theme to match {kib} +* Writing {es} queries using the time range and filters from dashboards +* Using the Elastic Map Service in Vega maps +* Additional tooltip styling +* Advanced setting to enable URL loading from any domain +* Limited debugging support using the browser dev tools +* (Vega only) Expression functions which can update the time range and dashboard filters + +[float] +[[vega-sizing-and-positioning]] +==== Default height and width + +By default, Vega visualizations use the `autosize = { type: 'fit', contains: 'padding' }` layout. +`fit` uses all available space, ignores `width` and `height` values, +and respects the padding values. To override this behavior, change the +`autosize` value. + +[float] +[[vega-theme]] +==== Default theme to match {kib} + +{kib} registers a default https://vega.github.io/vega/docs/schemes/[Vega color scheme] +with the id `elastic`, and sets a default color for each `mark` type. +Override it by providing a different `stroke`, `fill`, or `color` (Vega-Lite) value. + +[float] +[[vega-queries]] +==== Writing {es} queries in Vega + +experimental[] {kib} extends the Vega https://vega.github.io/vega/docs/data/[data] elements +with support for direct {es} queries specified as a `url`. + +Because of this, {kib} is **unable to support dynamically loaded data**, +which would otherwise work in Vega. All data is fetched before it's passed to +the Vega renderer. + +To define an {es} query in Vega, set the `url` to an object. {kib} will parse +the object looking for special tokens that allow your query to integrate with {kib}. 
+These tokens are: + +* `%context%: true`: Set at the top level, and replaces the `query` section with filters from dashboard +* `%timefield%: `: Set at the top level, integrates the query with the dashboard time filter +* `{%timefilter%: true}`: Replaced by an {es} range query with upper and lower bounds +* `{%timefilter%: "min" | "max"}`: Replaced only by the upper or lower bounds +* `{%timefilter: true, shift: -1, unit: 'hour'}`: Generates a time range query one hour in the past +* `{%autointerval%: true}`: Replaced by the string which contains the automatic {kib} time interval, such as `1h` +* `{%autointerval%: 10}`: Replaced by a string which is approximately dividing the time into 10 ranges, allowing + you to influence the automatic interval +* `"%dashboard_context-must_clause%"`: String replaced by object containing filters +* `"%dashboard_context-filter_clause%"`: String replaced by an object containing filters +* `"%dashboard_context-must_not_clause%"`: String replaced by an object containing filters + +Putting this together, an example query that counts the number of documents in +a specific index: + +[source,yaml] +---- +// An object instead of a string for the URL value +// is treated as a context-aware Elasticsearch query. +url: { + // Specify the time filter. + %timefield%: @timestamp + // Apply dashboard context filters when set + %context%: true + + // Which indexes to search + index: kibana_sample_data_logs + // The body element may contain "aggs" and "query" keys + body: { + aggs: { + time_buckets: { + date_histogram: { + // Use date histogram aggregation on @timestamp field + field: @timestamp <1> + // interval value will depend on the time filter + // Use an integer to set approximate bucket count + interval: { %autointerval%: true } + // Make sure we get an entire range, even if it has no data + extended_bounds: { + min: { %timefilter%: "min" } + max: { %timefilter%: "max" } + } + // Use this for linear (e.g. line, area) graphs + // Without it, empty buckets will not show up + min_doc_count: 0 + } + } + } + // Speed up the response by only including aggregation results + size: 0 + } +} +---- + +<1> `@timestamp` — Filters the time range and breaks it into histogram +buckets. + +The full result includes the following structure: + +[source,yaml] +---- +{ + "aggregations": { + "time_buckets": { + "buckets": [{ + "key_as_string": "2015-11-30T22:00:00.000Z", + "key": 1448920800000,<1> + "doc_count": 28 + }, { + "key_as_string": "2015-11-30T23:00:00.000Z", + "key": 1448924400000, <1> + "doc_count": 330 + }, ... +---- + +<1> `"key"` — The unix timestamp you can use without conversions by the +Vega date expressions. + +For most visualizations, you only need the list of bucket values. To focus on +only the data you need, use `format: {property: "aggregations.time_buckets.buckets"}`. + +Specify a query with individual range and dashboard context. 
The query is +equivalent to `"%context%": true, "%timefield%": "@timestamp"`, +except that the time range is shifted back by 10 minutes: + +[source,yaml] +---- +{ + body: { + query: { + bool: { + must: [ + // This string will be replaced + // with the auto-generated "MUST" clause + "%dashboard_context-must_clause%" + { + range: { + // apply timefilter (upper right corner) + // to the @timestamp variable + @timestamp: { + // "%timefilter%" will be replaced with + // the current values of the time filter + // (from the upper right corner) + "%timefilter%": true + // Only work with %timefilter% + // Shift current timefilter by 10 units back + shift: 10 + // week, day (default), hour, minute, second + unit: minute + } + } + } + ] + must_not: [ + // This string will be replaced with + // the auto-generated "MUST-NOT" clause + "%dashboard_context-must_not_clause%" + ] + filter: [ + // This string will be replaced + // with the auto-generated "FILTER" clause + "%dashboard_context-filter_clause%" + ] + } + } + } +} +---- + +NOTE: When using `"%context%": true` or defining a value for `"%timefield%"` the body cannot contain a query. To customize the query within the VEGA specification (e.g. add an additional filter, or shift the timefilter), define your query and use the placeholders as in the example above. The placeholders will be replaced by the actual context of the dashboard or visualization once parsed. + +The `"%timefilter%"` can also be used to specify a single min or max +value. The date_histogram's `extended_bounds` can be set +with two values - min and max. Instead of hardcoding a value, you may +use `"min": {"%timefilter%": "min"}`, which will be replaced with the +beginning of the current time range. The `shift` and `unit` values are +also supported. The `"interval"` can also be set dynamically, depending +on the currently picked range: `"interval": {"%autointerval%": 10}` will +try to get about 10-15 data points (buckets). + +[float] +[[vega-esmfiles]] +=== Access Elastic Map Service files + +experimental[] Access the Elastic Map Service files via the same mechanism: + +[source,yaml] +---- +url: { + // "type" defaults to "elasticsearch" otherwise + type: emsfile + // Name of the file, exactly as in the Region map visualization + name: World Countries +} +// The result is a geojson file, get its features to use +// this data source with the "shape" marks +// https://vega.github.io/vega/docs/marks/shape/ +format: {property: "features"} +---- + +To enable Maps, the graph must specify `type=map` in the host +configuration: + +[source,yaml] +---- +{ + "config": { + "kibana": { + "type": "map", + + // Initial map position + "latitude": 40.7, // default 0 + "longitude": -74, // default 0 + "zoom": 7, // default 2 + + // defaults to "default". Use false to disable base layer. + "mapStyle": false, + + // default 0 + "minZoom": 5, + + // defaults to the maximum for the given style, + // or 25 when base is disabled + "maxZoom": 13, + + // defaults to true, shows +/- buttons to zoom in/out + "zoomControl": false, + + // Defaults to 'false', disables mouse wheel zoom. If set to + // 'true', map may zoom unexpectedly while scrolling dashboard + "scrollWheelZoom": false, + + // When false, repaints on each move frame. + // Makes the graph slower when moving the map + "delayRepaint": true, // default true + } + }, + /* the rest of Vega JSON */ +} +---- + +The visualization automatically injects a `"projection"`, which you can use to +calculate the position of all geo-aware marks. 
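+
+As an illustration, here is a minimal sketch of a `shape` mark that positions GeoJSON features with the injected projection. The data set name `world_countries` is only an assumed example; use whatever name your spec gives to its EMS or {es} data source:
+
+```js
+// Sketch only: assumes a data set named "world_countries", for example the
+// emsfile source shown above loaded with format: {property: "features"}.
+"marks": [
+  {
+    "type": "shape",
+    "from": {"data": "world_countries"},
+    "transform": [
+      // "projection" refers to the projection automatically injected by Kibana
+      {"type": "geoshape", "projection": "projection"}
+    ]
+  }
+]
+```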
+Additionally, you can use `latitude`, `longitude`, and `zoom` signals. +These signals can be used in the graph, or can be updated to modify the +position of the map. + +[float] +[[vega-tooltip]] +==== Additional tooltip styling + +{kib} has installed the https://vega.github.io/vega-lite/docs/tooltip.html[Vega tooltip plugin], +so tooltips can be defined in the ways documented there. Beyond that, {kib} also supports +a configuration option for changing the tooltip position and padding: + +```js +{ + config: { + kibana: { + tooltips: { + position: 'top', + padding: 15 + } + } + } +} +``` + +[float] +[[vega-url-loading]] +==== Advanced setting to enable URL loading from any domain + +Vega can load data from any URL, but this is disabled by default in {kib}. +To change this, set `vis_type_vega.enableExternalUrls: true` in `kibana.yml`, +then restart {kib}. + +[float] +[[vega-inspector]] +==== Vega Inspector +Use the contextual *Inspect* tool to gain insights into different elements. +For Vega visualizations, there are two different views: *Request* and *Vega debug*. + +[float] +[[inspect-elasticsearch-requests]] +===== Inspect {es} requests + +Vega uses the {ref}/search-search.html[{es} search API] to get documents and aggregation +results from {es}. To troubleshoot these requests, click *Inspect*, which shows the most recent requests. +In case your specification has more than one request, you can switch between the views using the *View* dropdown. + +[role="screenshot"] +image::visualize/images/vega_tutorial_inspect_requests.png[] + +[float] +[[vega-debugging]] +===== Vega debugging + +With the *Vega debug* view, you can inspect the *Data sets* and *Signal Values* runtime data. + +The runtime data is read from the +https://vega.github.io/vega/docs/api/debugging/#scope[runtime scope]. + +[role="screenshot"] +image::visualize/images/vega_tutorial_inspect_data_sets.png[] + +To debug more complex specs, access to the `view` variable. For more information, refer to +the <>. + +[float] +[[asking-for-help-with-a-vega-spec]] +===== Asking for help with a Vega spec + +Because of the dynamic nature of the data in {es}, it is hard to help you with +Vega specs unless you can share a dataset. To do this, click *Inspect*, select the *Vega debug* view, +then select the *Spec* tab: + +[role="screenshot"] +image::visualize/images/vega_tutorial_getting_help.png[] + +To copy the response, click *Copy to clipboard*. Paste the copied data to +https://gist.github.com/[gist.github.com], possibly with a .json extension. Use the [raw] button, +and share that when asking for help. + +[float] +[[vega-browser-debugging-console]] +==== Browser debugging console + +experimental[] Use browser debugging tools (for example, F12 or Ctrl+Shift+J in Chrome) to +inspect the `VEGA_DEBUG` variable: + +* `view` — Access to the Vega View object. See https://vega.github.io/vega/docs/api/debugging/[Vega Debugging Guide] +on how to inspect data and signals at runtime. For Vega-Lite, +`VEGA_DEBUG.view.data('source_0')` gets the pre-transformed data, and `VEGA_DEBUG.view.data('data_0')` +gets the encoded data. For Vega, it uses the data name as defined in your Vega spec. + +* `vega_spec` — Vega JSON graph specification after some modifications by {kib}. In case +of Vega-Lite, this is the output of the Vega-Lite compiler. + +* `vegalite_spec` — If this is a Vega-Lite graph, JSON specification of the graph before +Vega-Lite compilation. 
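+
+For example, with a Vega-Lite visualization rendered on screen, the following console snippet is a minimal sketch of how these variables can be read; the data set names shown are the Vega-Lite compiler defaults and may differ in your spec:
+
+```js
+// Run in the browser dev tools console while the visualization is rendered.
+VEGA_DEBUG.view.data('source_0');   // pre-transformed data (Vega-Lite default name)
+VEGA_DEBUG.view.data('data_0');     // encoded data (Vega-Lite default name)
+VEGA_DEBUG.view.signal('width');    // read a runtime signal value
+console.log(VEGA_DEBUG.vega_spec);  // the Vega spec Kibana actually renders
+```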
+ +[float] +[[vega-expression-functions]] +==== (Vega only) Expression functions which can update the time range and dashboard filters + +{kib} has extended the Vega expression language with these functions: + +```js +/** + * @param {object} query Elastic Query DSL snippet, as used in the query DSL editor + * @param {string} [index] as defined in Kibana, or default if missing + */ +kibanaAddFilter(query, index) + +/** + * @param {object} query Elastic Query DSL snippet, as used in the query DSL editor + * @param {string} [index] as defined in Kibana, or default if missing + */ +kibanaRemoveFilter(query, index) + +kibanaRemoveAllFilters() + +/** + * Update dashboard time filter to the new values + * @param {number|string|Date} start + * @param {number|string|Date} end + */ +kibanaSetTimeFilter(start, end) +``` + +[float] +[[vega-additional-configuration-options]] +==== Additional configuration options + +[source,yaml] +---- +{ + config: { + kibana: { + // Placement of the Vega-defined signal bindings. + // Can be `left`, `right`, `top`, or `bottom` (default). + controlsLocation: top + // Can be `vertical` or `horizontal` (default). + controlsDirection: vertical + // If true, hides most of Vega and Vega-Lite warnings + hideWarnings: true + // Vega renderer to use: `svg` or `canvas` (default) + renderer: canvas + } + } +} +---- + +[[vega-notes]] +[[resources-and-examples]] +=== Resources and examples + +experimental[] To learn more about Vega and Vega-Lite, refer to the resources and examples. + +[float] +[[vega-editor]] +==== Vega editor +The https://vega.github.io/editor/[Vega Editor] includes examples for Vega & Vega-Lite, but does not support any +{kib}-specific features like {es} requests and interactive base maps. + +[float] +[[vega-lite-resources]] +==== Vega-Lite resources +* https://vega.github.io/vega-lite/tutorials/getting_started.html[Tutorials] +* https://vega.github.io/vega-lite/docs/[Docs] +* https://vega.github.io/vega-lite/examples/[Examples] + +[float] +[[vega-resources]] +==== Vega resources +* https://vega.github.io/vega/tutorials/[Tutorials] +* https://vega.github.io/vega/docs/[Docs] +* https://vega.github.io/vega/examples/[Examples] + +TIP: When you use the examples in {kib}, you may +need to modify the "data" section to use absolute URL. For example, +replace `"url": "data/world-110m.json"` with +`"url": "https://vega.github.io/editor/data/world-110m.json"`. diff --git a/docs/user/getting-started.asciidoc b/docs/user/getting-started.asciidoc index 2ff3a09152df4..a877f6a66a79a 100644 --- a/docs/user/getting-started.asciidoc +++ b/docs/user/getting-started.asciidoc @@ -1,19 +1,19 @@ -[[getting-started]] +[[get-started]] = Get started [partintro] -- -Ready to try out {kib} and see what it can do? To quickest way to get started with {kib} is to set up on Cloud, then add a sample data set that helps you get a handle on the full range of {kib} features. +Ready to try out {kib} and see what it can do? The quickest way to get started with {kib} is to set up on Cloud, then add a sample data set to explore the full range of {kib} features. [float] -[[cloud-set-up]] +[[set-up-on-cloud]] == Set up on cloud include::{docs-root}/shared/cloud/ess-getting-started.asciidoc[] [float] -[[get-data-in]] +[[gs-get-data-into-kibana]] == Get data into {kib} The easiest way to get data into {kib} is to add a sample data set. 
@@ -42,12 +42,11 @@ NOTE: The timestamps in the sample data sets are relative to when they are insta If you uninstall and reinstall a data set, the timestamps change to reflect the most recent installation. [float] -[[getting-started-next-steps]] == Next steps -* To get a hands-on experience creating visualizations, follow the <> tutorial. +* To get a hands-on experience creating visualizations, follow the <> tutorial. -* If you're ready to load an actual data set and build a dashboard, follow the <> tutorial. +* If you're ready to load an actual data set and build a dashboard, follow the <> tutorial. -- @@ -60,5 +59,3 @@ include::{kib-repo-dir}/getting-started/tutorial-define-index.asciidoc[] include::{kib-repo-dir}/getting-started/tutorial-discovering.asciidoc[] include::{kib-repo-dir}/getting-started/tutorial-visualizing.asciidoc[] - -include::{kib-repo-dir}/getting-started/tutorial-dashboard.asciidoc[] diff --git a/docs/user/index.asciidoc b/docs/user/index.asciidoc index abbdbeb68d9cb..e909626c5779c 100644 --- a/docs/user/index.asciidoc +++ b/docs/user/index.asciidoc @@ -2,8 +2,6 @@ include::introduction.asciidoc[] include::whats-new.asciidoc[] -include::getting-started.asciidoc[] - include::setup.asciidoc[] include::monitoring/configuring-monitoring.asciidoc[leveloffset=+1] @@ -13,9 +11,11 @@ include::monitoring/monitoring-kibana.asciidoc[leveloffset=+2] include::security/securing-kibana.asciidoc[] +include::getting-started.asciidoc[] + include::discover.asciidoc[] -include::dashboard.asciidoc[] +include::dashboard/dashboard.asciidoc[] include::canvas.asciidoc[] @@ -25,8 +25,6 @@ include::ml/index.asciidoc[] include::graph/index.asciidoc[] -include::visualize.asciidoc[] - include::{kib-repo-dir}/observability/index.asciidoc[] include::{kib-repo-dir}/logs/index.asciidoc[] diff --git a/docs/user/introduction.asciidoc b/docs/user/introduction.asciidoc index ff936fb4d5569..079d183dd959d 100644 --- a/docs/user/introduction.asciidoc +++ b/docs/user/introduction.asciidoc @@ -83,12 +83,6 @@ image::images/intro-dashboard.png[] {kib} also offers these visualization features: -* <> allows you to display your data in -charts, graphs, and tables -(just to name a few). It's also home to Lens. -Visualize supports the ability to add interactive -controls to your dashboard, filter dashboard content in real time, and add your own images and logos for your brand. - * <> gives you the ability to present your data in a visually compelling, pixel-perfect report. Give your data the “wow” factor needed to impress your CEO or to captivate coworkers with a big-screen display. @@ -98,7 +92,7 @@ questions of your location-based data. Maps supports multiple layers and data sources, mapping of individual geo points and shapes, and dynamic client-side styling. -* <> allows you to combine +* <> allows you to combine an infinite number of aggregations to display complex data. With TSVB, you can analyze multiple index patterns and customize every aspect of your visualization. Choose your own date format and color @@ -161,6 +155,6 @@ and start exploring data in minutes. You can also <> — no code, no additional infrastructure required. -Our <> and in-product guidance can +Our <> and in-product guidance can help you get up and running, faster. Click the help icon image:images/intro-help-icon.png[] in the top navigation bar for help with questions or to provide feedback. 
diff --git a/docs/user/reporting/automating-report-generation.asciidoc b/docs/user/reporting/automating-report-generation.asciidoc index 3e227229ddcc5..371855deb2f3c 100644 --- a/docs/user/reporting/automating-report-generation.asciidoc +++ b/docs/user/reporting/automating-report-generation.asciidoc @@ -13,7 +13,7 @@ URL that triggers a report to generate. To create the POST URL for PDF reports: -. Go to *Visualize* or *Dashboard*, then open the visualization or dashboard. +. Go to *Dashboard*, then open the visualization or dashboard. + To specify a relative or absolute time period, use the time filter. diff --git a/docs/user/reporting/index.asciidoc b/docs/user/reporting/index.asciidoc index 4f4d59315fafa..50ae92382fb24 100644 --- a/docs/user/reporting/index.asciidoc +++ b/docs/user/reporting/index.asciidoc @@ -11,7 +11,7 @@ saved search, or Canvas workpad. Depending on the object type, you can export th a PDF, PNG, or CSV document, which you can keep for yourself, or share with others. Reporting is available from the *Share* menu -in *Discover*, *Visualize*, *Dashboard*, and *Canvas*. +in *Discover*, *Dashboard*, and *Canvas*. [role="screenshot"] image::user/reporting/images/share-button.png["Share"] diff --git a/docs/user/reporting/reporting-troubleshooting.asciidoc b/docs/user/reporting/reporting-troubleshooting.asciidoc index dc4ffdfebdae9..82f0aa7ca0f19 100644 --- a/docs/user/reporting/reporting-troubleshooting.asciidoc +++ b/docs/user/reporting/reporting-troubleshooting.asciidoc @@ -7,6 +7,7 @@ Having trouble? Here are solutions to common problems you might encounter while using Reporting. +* <> * <> * <> * <> @@ -15,6 +16,11 @@ Having trouble? Here are solutions to common problems you might encounter while * <> * <> +[float] +[[reporting-diagnostics]] +=== Reporting Diagnostics +Reporting comes with a built-in utility to try to automatically find common issues. When Kibana is running, navigate to the Report Listing page, and click the "Run reporting diagnostics..." button. This will open up a diagnostic tool that checks various parts of the Kibana deployment to come up with any relevant recommendations. + [float] [[reporting-troubleshooting-system-dependencies]] === System dependencies diff --git a/docs/user/security/rbac_tutorial.asciidoc b/docs/user/security/rbac_tutorial.asciidoc index 3a4b2202201e2..cc4af9041bcd9 100644 --- a/docs/user/security/rbac_tutorial.asciidoc +++ b/docs/user/security/rbac_tutorial.asciidoc @@ -28,7 +28,7 @@ To complete this tutorial, you'll need the following: * **A space**: In this tutorial, use `Dev Mortgage` as the space name. See <> for details on creating a space. -* **Data**: You can use <> or +* **Data**: You can use <> or live data. In the following steps, Filebeat and Metricbeat data are used. [float] diff --git a/docs/user/security/reporting.asciidoc b/docs/user/security/reporting.asciidoc index 4e02759ce99cb..daf9720a0f1d8 100644 --- a/docs/user/security/reporting.asciidoc +++ b/docs/user/security/reporting.asciidoc @@ -47,7 +47,7 @@ image::user/security/images/reporting-privileges-example.png["Reporting privileg Reporting users typically save searches, create visualizations, and build dashboards. They require a space that provides read and write privileges in -*Discover*, *Visualize*, and *Dashboard*. +*Discover* and *Dashboard*. . Save your new role. 
diff --git a/docs/user/visualize.asciidoc b/docs/user/visualize.asciidoc deleted file mode 100644 index dc116962f9e96..0000000000000 --- a/docs/user/visualize.asciidoc +++ /dev/null @@ -1,142 +0,0 @@ -[[visualize]] -= Visualize - -[partintro] --- -_Visualize_ enables you to create visualizations of the data from your {es} indices, which you can then add to dashboards for analysis. - -{kib} visualizations are based on {es} queries. By using a series of {es} {ref}/search-aggregations.html[aggregations] to extract and process your data, you can create charts that show you the trends, spikes, and dips you need to know about. - -To begin, open the menu, go to *Visualize*, then click *Create visualization*. - -[float] -[[visualization-types]] -== Types of visualizations - -{kib} supports several types of visualizations. - -<>:: -Quickly build several types of basic visualizations by simply dragging and dropping the data fields you want to display. - -<>:: - -* *Line, area, and bar charts* — Compares different series in X/Y charts. - -* *Pie chart* — Displays each source contribution to a total. - -* *Data table* — Flattens aggregations into table format. - -* *Metric* — Displays a single number. - -* *Goal and gauge* — Displays a number with progress indicators. - -* *Tag cloud* — Displays words in a cloud, where the size of the word corresponds to its importance. - -<>:: Visualizes time series data using pipeline aggregations. - -<>:: Computes and combine data from multiple time series -data sets. - -<>:: -* *<>* — Displays geospatial data in {kib}. - -* <>:: Display shaded cells within a matrix. - -<>:: - -* *Markdown widget* — Displays free-form information or instructions. - -* *Controls* — Adds interactive inputs to a dashboard. - -<>:: Completes control over query and display. - -[float] -[[choose-your-data]] -== Choose your data - -Specify a search query to retrieve the data for your visualization, or used rolled up data. - -* To enter new search criteria, select the <> for the indices that -contain the data you want to visualize. The visualization builder opens -with a wildcard query that matches all of the documents in the selected -indices. - -* To build a visualization from a saved search, click the name of the saved -search you want to use. The visualization builder opens and loads the -selected query. -+ -NOTE: When you build a visualization from a saved search, any subsequent -modifications to the saved search are reflected in the -visualization. To disable automatic updates, delete the visualization -on the *Saved Object* page. - -* To build a visualization using <>, select -the index pattern that includes the data. Rolled up data is summarized into -time buckets that can be split into sub buckets for numeric field values or -terms. To lower granularity, use a time aggregation that uses and combines -several time buckets. For an example, refer to <>. - -[float] -[[vis-inspector]] -== Inspect visualizations - -Many visualizations allow you to inspect the query and data behind the visualization. - -. In the {kib} toolbar, click *Inspect*. -. To download the data, click *Download CSV*, then choose one of the following options: -* *Formatted CSV* - Downloads the data in table format. -* *Raw CSV* - Downloads the data as provided. -. To view the requests for collecting data, select *Requests* from the *View* -dropdown. - -[float] -[[save-visualize]] -== Save visualizations -To use your visualizations in <>, you must save them. - -. In the {kib} toolbar, click *Save*. -. 
Enter the visualization *Title* and optional *Description*, then *Save* the visualization. - -To access the saved visualization, go to *Management > {kib} > Saved Objects*. - -[float] -[[save-visualization-read-only-access]] -==== Read only access -When you have insufficient privileges to save visualizations, the following indicator is -displayed and the *Save* button is not visible. - -For more information, refer to <>. - -[role="screenshot"] -image::visualize/images/read-only-badge.png[Example of Visualize's read only access indicator in Kibana's header] - -[float] -[[visualize-share-options]] -== Share visualizations - -When you've finished your visualization, you can share it outside of {kib}. - -From the *Share* menu, you can: - -* Embed the code in a web page. Users must have {kib} access -to view an embedded visualization. -* Share a direct link to a {kib} visualization. -* Generate a PDF report. -* Generate a PNG report. - --- -include::{kib-repo-dir}/visualize/aggregations.asciidoc[] - -include::{kib-repo-dir}/visualize/lens.asciidoc[] - -include::{kib-repo-dir}/visualize/most-frequent.asciidoc[] - -include::{kib-repo-dir}/visualize/tsvb.asciidoc[] - -include::{kib-repo-dir}/visualize/timelion.asciidoc[] - -include::{kib-repo-dir}/visualize/tilemap.asciidoc[] - -include::{kib-repo-dir}/visualize/for-dashboard.asciidoc[] - -include::{kib-repo-dir}/visualize/vega.asciidoc[] diff --git a/docs/visualize/aggregations.asciidoc b/docs/visualize/aggregations.asciidoc deleted file mode 100644 index ef38f716f2303..0000000000000 --- a/docs/visualize/aggregations.asciidoc +++ /dev/null @@ -1,110 +0,0 @@ -[[supported-aggregations]] -== Supported aggregations - -Use the supported aggregations to build your visualizations. - -[float] -[[visualize-metric-aggregations]] -=== Metric aggregations - -Metric aggregations extract field from documents to generate data values. - -{ref}/search-aggregations-metrics-avg-aggregation.html[Average]:: The mean value. - -{ref}/search-aggregations-metrics-valuecount-aggregation.html[Count]:: The total number of documents that match the query, which allows you to visualize the number of documents in a bucket. Count is the default value. - -{ref}/search-aggregations-metrics-max-aggregation.html[Max]:: The highest value. - -{ref}/search-aggregations-metrics-percentile-aggregation.html[Median]:: The value that is in the 50% percentile. - -{ref}/search-aggregations-metrics-min-aggregation.html[Min]:: The lowest value. - -{ref}/search-aggregations-metrics-percentile-rank-aggregation.html[Percentile ranks]:: Returns the percentile rankings for the values in the specified numeric field. Select a numeric field from the drop-down, then specify one or more percentile rank values in the *Values* fields. - -{ref}/search-aggregations-metrics-percentile-aggregation.html[Percentiles]:: Divides the -values in a numeric field into specified percentile bands. Select a field from the drop-down, then specify one or more ranges in the *Percentiles* fields. - -Standard Deviation:: Requires a numeric field. Uses the {ref}/search-aggregations-metrics-extendedstats-aggregation.html[_extended stats_] aggregation. - -{ref}/search-aggregations-metrics-sum-aggregation.html[Sum]:: The total value. - -{ref}/search-aggregations-metrics-top-hits-aggregation.html[Top hit]:: Returns a sample of individual documents. When the Top Hit aggregation is matched to more than one document, you must choose a technique for combining the values. Techniques include average, minimum, maximum, and sum. 
- -Unique Count:: The {ref}/search-aggregations-metrics-cardinality-aggregation.html[Cardinality] of the field within the bucket. - -Alternatively, you can override the field values with a script using JSON input. For example: - -[source,shell] -{ "script" : "doc['grade'].value * 1.2" } - -The example implements a {es} {ref}/search-aggregations.html[Script Value Source], which replaces -the value in the metric. The options available depend on the aggregation you choose. - -[float] -[[visualize-parent-pipeline-aggregations]] -=== Parent pipeline aggregations - -Parent pipeline aggregations assume the bucket aggregations are ordered and are especially useful for time series data. For each parent pipeline aggregation, you must define a bucket aggregation and metric aggregation. - -You can also nest these aggregations. For example, if you want to produce a third derivative. - -{ref}/search-aggregations-pipeline-bucket-script-aggregation.html[Bucket script]:: Executes a script that performs computations for each bucket that specifies metrics in the parent multi-bucket aggregation. - -{ref}/search-aggregations-pipeline-cumulative-sum-aggregation.html[Cumulative sum]:: Calculates the cumulative sum of a specified metric in a parent histogram. - -{ref}/search-aggregations-pipeline-derivative-aggregation.html[Derivative]:: Calculates the derivative of specific metrics. - -{ref}/search-aggregations-pipeline-movavg-aggregation.html[Moving avg]:: Slides a window across the data and emits the average value of the window. - -{ref}/search-aggregations-pipeline-serialdiff-aggregation.html[Serial diff]:: Values in a time series are subtracted from itself at different time lags or periods. - -[float] -[[visualize-sibling-pipeline-aggregations]] -=== Sibling pipeline aggregations - -Sibling pipeline aggregations condense many buckets into one. For each sibling pipeline aggregation, you must define a bucket aggregations and metric aggregation. - -{ref}/search-aggregations-pipeline-avg-bucket-aggregation.html[Average bucket]:: Calculates the mean, or average, value of a specified metric in a sibling aggregation. - -{ref}/search-aggregations-pipeline-avg-bucket-aggregation.html[Max Bucket]:: Calculates the maximum value of a specified metric in a sibling aggregation. - -{ref}/search-aggregations-pipeline-avg-bucket-aggregation.html[Min Bucket]:: Calculates the minimum value of a specified metric in a sibling aggregation. - -{ref}/search-aggregations-pipeline-avg-bucket-aggregation.html[Sum Bucket]:: Calculates the sum of the values of a specified metric in a sibling aggregation. - -[float] -[[visualize-bucket-aggregations]] -=== Bucket aggregations - -Bucket aggregations sort documents into buckets, depending on the contents of the document. - -{ref}/search-aggregations-bucket-datehistogram-aggregation.html[Date histogram]:: Splits a date field into buckets by interval. If the date field is the primary time field for the index pattern, it chooses an automatic interval for you. Intervals are labeled at the start of the interval, using the date-key returned by {es}. For example, the tooltip for a monthly interval displays the first day of the month. - -{ref}/search-aggregations-bucket-daterange-aggregation.html[Date range]:: Reports values that are within a range of dates that you specify. You can specify the ranges for the dates using {ref}/common-options.html#date-math[_date math_] expressions. - -{ref}/search-aggregations-bucket-filter-aggregation.html[Filter]:: Each filter creates a bucket of documents. 
You can specify a filter as a -<> or <> query string. - -{ref}/search-aggregations-bucket-geohashgrid-aggregation.html[Geohash]:: Displays points based on a geohash. Supported by data table visualizations and <>. - -{ref}/search-aggregations-bucket-geotilegrid-aggregation.html[Geotile]:: Groups points based on web map tiling. Supported by data table visualizations and <>. - -{ref}/search-aggregations-bucket-histogram-aggregation.html[Histogram]:: Builds from a numeric field. - -{ref}/search-aggregations-bucket-iprange-aggregation.html[IPv4 range]:: Specify ranges of IPv4 addresses. - -{ref}/search-aggregations-bucket-range-aggregation.html[Range]:: Specify ranges of values for a numeric field. - -{ref}/search-aggregations-bucket-significantterms-aggregation.html[Significant terms]:: Returns interesting or unusual occurrences of terms in a set. Supports {es} {ref}/search-aggregations-bucket-terms-aggregation.html#_filtering_values_4[exclude and include patterns]. - -{ref}/search-aggregations-bucket-terms-aggregation.html[Terms]:: Specify the top or bottom _n_ elements of a given field to display, ordered by count or a custom metric. Supports {es} {ref}/search-aggregations-bucket-terms-aggregation.html#_filtering_values_4[exclude and include patterns]. - -{kib} filters string fields with only regular expression patterns, and does not filter numeric fields or match with arrays. - -For example: - -* You want to exclude the metricbeat process from your visualization of top processes: `metricbeat.*` -* You only want to show processes collecting beats: `.*beat` -* You want to exclude two specific values, the string `"empty"` and `"none"`: `empty|none` - -Patterns are case sensitive. diff --git a/docs/visualize/for-dashboard.asciidoc b/docs/visualize/for-dashboard.asciidoc deleted file mode 100644 index 400179e9ceae7..0000000000000 --- a/docs/visualize/for-dashboard.asciidoc +++ /dev/null @@ -1,67 +0,0 @@ -[[for-dashboard]] -== Dashboard tools - -Visualize comes with controls and Markdown tools that you can add to dashboards for an interactive experience. - -[float] -[[controls]] -=== Controls -experimental[] - -The controls tool enables you to add interactive inputs -on a dashboard. - -You can add two types of interactive inputs: - -* *Options list* — Filters content based on one or more specified options. The dropdown menu is dynamically populated with the results of a terms aggregation. For example, use the options list on the sample flight dashboard when you want to filter the data by origin city and destination city. - -* *Range slider* — Filters data within a specified range of numbers. The minimum and maximum values are dynamically populated with the results of a min and max aggregation. For example, use the range slider when you want to filter the sample flight dashboard by a specific average ticket price. - -[role="screenshot"] -image::images/dashboard-controls.png[] - -[float] -[[controls-options]] -==== Controls options - -Configure the settings that apply to the interactive inputs on a dashboard. - -. Click *Options*, then configure the following: - -* *Update {kib} filters on each change* — When selected, all interactive inputs create filters that refresh the dashboard. When unselected, {kib} filters are created only when you click *Apply changes*. - -* *Use time filter* — When selected, the aggregations that generate the options list and time range are connected to the <>. 
- -* *Pin filters to global state* — When selected, all filters created by interacting with the inputs are automatically pinned. - -. Click *Update*. - -[float] -[[markdown-widget]] -=== Markdown - -The Markdown tool is a text entry field that accepts GitHub-flavored Markdown text. When you enter the text, the tool populates the results on the dashboard. - -Markdown is helpful when you want to include important information, instructions, and images on your dashboard. - -For information about GitHub-flavored Markdown text, click *Help*. - -For example, when you enter: - -[role="screenshot"] -image::images/markdown_example_1.png[] - -The following instructions are displayed: - -[role="screenshot"] -image::images/markdown_example_2.png[] - -Or when you enter: - -[role="screenshot"] -image::images/markdown_example_3.png[] - -The following image is displayed: - -[role="screenshot"] -image::images/markdown_example_4.png[] diff --git a/docs/visualize/images/lens_drag_drop.gif b/docs/visualize/images/lens_drag_drop.gif index ca62115e7ea3a..1f8580d462702 100644 Binary files a/docs/visualize/images/lens_drag_drop.gif and b/docs/visualize/images/lens_drag_drop.gif differ diff --git a/docs/visualize/lens.asciidoc b/docs/visualize/lens.asciidoc deleted file mode 100644 index 6e51433bca3f6..0000000000000 --- a/docs/visualize/lens.asciidoc +++ /dev/null @@ -1,173 +0,0 @@ -[role="xpack"] -[[lens]] -== Lens - -beta[] - -*Lens* is a simple and fast way to create visualizations of your {es} data. To create visualizations, -you drag and drop your data fields onto the visualization builder pane, and *Lens* automatically generates -a visualization that best displays your data. - -With Lens, you can: - -* Use the automatically generated visualization suggestions to change the visualization type. - -* Create visualizations with multiple layers and indices. - -* Add your visualizations to dashboards and Canvas workpads. - -To get started with *Lens*, select a field in the data panel, then drag and drop the field on a highlighted area. - -[role="screenshot"] -image::images/lens_drag_drop.gif[Drag and drop] - -You can incorporate many fields into your visualization, and Lens uses heuristics to decide how to apply each one to the visualization. - -TIP: Drag-and-drop capabilities are available only when Lens knows how to use the data. If *Lens* is unable to automatically generate a visualization, -you can still configure the customization options for your visualization. - -[float] -[[apply-lens-filters]] -==== Change the data panel fields - -The fields in the data panel are based on the selected <> and <>. - -To change the index pattern, click it, then select a new one. The fields in the data panel automatically update. - -To filter the fields in the data panel: - -* Enter the name in *Search field names*. - -* Click *Filter by type*, then select the filter. To show all of the fields in the index pattern, deselect *Only show fields with data*. - -[float] -[[view-data-summaries]] -==== Data summaries - -To help you decide exactly the data you want to display, get a quick summary of each field. The summary shows the distribution of values within the selected time range. - -To view the field summary information, navigate to the field, then click *i*. - -[role="screenshot"] -image::images/lens_data_info.png[Data summary window] - -[float] -[[change-the-visualization-type]] -==== Change the visualization type - -*Lens* enables you to switch between any supported visualization type at any time. 
- -*Suggestions* are shortcuts to alternate visualizations that *Lens* generates for you. - -[role="screenshot"] -image::images/lens_suggestions.gif[Visualization suggestions] - -If you'd like to use a visualization type that is not suggested, click the visualization type, -then select a new one. - -[role="screenshot"] -image::images/lens_viz_types.png[] - -When there is an exclamation point (!) -next to a visualization type, Lens is unable to transfer your data, but -still allows you to make the change. - -[float] -[[customize-operation]] -==== Change the aggregation and labels - -For each visualization, Lens allows some customizations of the data. - -. Click *Drop a field here* or the field name in the column. - -. Change the options that appear. Options vary depending on the type of field. -+ -[role="screenshot"] -image::images/lens_aggregation_labels.png[Quick function options] - -[float] -[[layers]] -==== Add layers and indices - -Area, line, and bar charts allow you to visualize multiple data layers and indices so that you can compare and analyze data from multiple sources. - -To add a layer, click *+*, then drag and drop the fields for the new layer. - -[role="screenshot"] -image::images/lens_layers.png[Add layers] - -To view a different index, click it, then select a new one. - -[role="screenshot"] -image::images/lens_index_pattern.png[Add index pattern] - -[float] -[[lens-tutorial]] -=== Lens tutorial - -Ready to create your own visualization with Lens? Use the following tutorial to create a visualization that -lets you compare sales over time. - -[float] -[[lens-before-begin]] -==== Before you begin - -To start, you'll need to add the <>. - -[float] -==== Build the visualization - -Drag and drop your data onto the visualization builder pane. - -. Select the *kibana_sample_data_ecommerce* index pattern. - -. Click image:images/time-filter-calendar.png[], then click *Last 7 days*. -+ -The fields in the data panel update. - -. Drag and drop the *taxful_total_price* data field to the visualization builder pane. -+ -[role="screenshot"] -image::images/lens_tutorial_1.png[Lens tutorial] - -To display the average order prices over time, *Lens* automatically added in *order_date* field. - -To break down your data, drag the *category.keyword* field to the visualization builder pane. Lens -knows that you want to show the top categories and compare them across the dates, -and creates a chart that compares the sales for each of the top three categories: - -[role="screenshot"] -image::images/lens_tutorial_2.png[Lens tutorial] - -[float] -[[customize-lens-visualization]] -==== Customize your visualization - -Make your visualization look exactly how you want with the customization options. - -. Click *Average of taxful_total_price*, then change the *Label* to `Sales`. -+ -[role="screenshot"] -image::images/lens_tutorial_3.1.png[Lens tutorial] - -. Click *Top values of category.keyword*, then change *Number of values* to `10`. -+ -[role="screenshot"] -image::images/lens_tutorial_3.2.png[Lens tutorial] -+ -The visualization updates to show there are only six available categories. -+ -Look at the *Suggestions*. An area chart is not an option, but for the sales data, a stacked area chart might be the best option. - -. To switch the chart type, click *Stacked bar chart* in the column, then click *Stacked area* from the *Select a visualizations* window. 
-+ -[role="screenshot"] -image::images/lens_tutorial_3.png[Lens tutorial] - -[float] -[[lens-tutorial-next-steps]] -==== Next steps - -Now that you've created your visualization, you can add it to a dashboard or Canvas workpad. - -For more information, refer to <> or <>. diff --git a/docs/visualize/most-frequent.asciidoc b/docs/visualize/most-frequent.asciidoc deleted file mode 100644 index f716930e7e65c..0000000000000 --- a/docs/visualize/most-frequent.asciidoc +++ /dev/null @@ -1,59 +0,0 @@ -[[most-frequent]] -== Most frequently used visualizations - -The most frequently used visualizations allow you to plot aggregated data from a <> or <>. - -The most frequently used visualizations include: - -* Line, area, and bar charts -* Pie chart -* Data table -* Metric, goal, and gauge -* Tag cloud - -[[metric-chart]] - -[float] -=== Configure your visualization - -You configure visualizations using the default editor. Each visualization supports different configurations of the metrics and buckets. - -For example, a bar chart allows you to add an x-axis: - -[role="screenshot"] -image::images/add-bucket.png["",height=478] - -A common configuration for the x-axis is to use a {es} {ref}/search-aggregations-bucket-datehistogram-aggregation.html[date histogram] aggregation: - -[role="screenshot"] -image::images/visualize-date-histogram.png[] - -To see your changes, click *Apply changes* image:images/apply-changes-button.png[] - -If it's supported by the visualization, you can add more buckets. In this example we have -added a -{es} {ref}/search-aggregations-bucket-terms-aggregation.html[terms] aggregation on the field -`geo.src` to show the top 5 sources of log traffic. - -[role="screenshot"] -image::images/visualize-date-histogram-split-1.png[] - -The new aggregation is added after the first one, so the result shows -the top 5 sources of traffic per 3 hours. If you want to change the aggregation order, you can do -so by dragging: - -[role="screenshot"] -image::images/visualize-drag-reorder.png["",width=366] - -The visualization -now shows the top 5 sources of traffic overall, and compares them in 3 hour increments: - -[role="screenshot"] -image::images/visualize-date-histogram-split-2.png[] - -For more information about how aggregations are used in visualizations, see <>. - -Each visualization also has its own customization options. Most visualizations allow you to customize the color of a specific series: - -[role="screenshot"] -image::images/color-picker.png[An array of color dots that users can select,height=267] diff --git a/docs/visualize/tilemap.asciidoc b/docs/visualize/tilemap.asciidoc deleted file mode 100644 index c889bd0bb6ca0..0000000000000 --- a/docs/visualize/tilemap.asciidoc +++ /dev/null @@ -1,27 +0,0 @@ -[[heat-map]] -== Heat map - -Display graphical representations of data where the individual values are represented by colors. Use heat maps when your data set includes categorical data. For example, use a heat map to see the flights of origin countries compared to destination countries using the sample flight data. - -[role="screenshot"] -image::images/visualize_heat_map_example.png[] - -[float] -[[navigate-heatmap]] -=== Change the color ranges - -When only one color displays on the heat map, you might need to change the color ranges. - -To specify the number of color ranges: - -. Click *Options*. - -. Enter the *Number of colors* to display. - -To specify custom ranges: - -. Click *Options*. - -. Select *Use custom ranges*. - -. Enter the ranges to display. 
diff --git a/docs/visualize/timelion.asciidoc b/docs/visualize/timelion.asciidoc deleted file mode 100644 index 4869664fab0a4..0000000000000 --- a/docs/visualize/timelion.asciidoc +++ /dev/null @@ -1,547 +0,0 @@ -[[timelion]] -== Timelion - -Timelion is a time series data visualizer that enables you to combine totally -independent data sources within a single visualization. It's driven by a simple -expression language you use to retrieve time series data, perform calculations -to tease out the answers to complex questions, and visualize the results. - -For example, Timelion enables you to easily get the answers to questions like: - -* <> -* <> -* <> - -[float] -[[time-series-before-you-begin]] -=== Before you begin - -In this tutorial, you'll use the time series data from https://www.elastic.co/guide/en/beats/metricbeat/current/index.html[Metricbeat]. To ingest the data locally, link:https://www.elastic.co/downloads/beats/metricbeat[download Metricbeat]. - -[float] -[[time-series-intro]] -=== Create time series visualizations - -To compare the real-time percentage of CPU time spent in user space to the results offset by one hour, create a time series visualization. - -[float] -[[time-series-define-functions]] -==== Define the functions - -To start tracking the real-time percentage of CPU, enter the following in the *Timelion Expression* field: - -[source,text] ----------------------------------- -.es(index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct') ----------------------------------- - -[role="screenshot"] -image::images/timelion-create01.png[] -{nbsp} - -[float] -[[time-series-compare-data]] -==== Compare the data - -To compare the two data sets, add another series with data from the previous hour, separated by a comma: - -[source,text] ----------------------------------- -.es(index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct'), -.es(offset=-1h, <1> - index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct') ----------------------------------- - -<1> `offset` offsets the data retrieval by a date expression. In this example, `-1h` offsets the data back by one hour. - -[role="screenshot"] -image::images/timelion-create02.png[] -{nbsp} - -[float] -[[time-series-add-labels]] -==== Add label names - -To easily distinguish between the two data sets, add the label names: - -[source,text] ----------------------------------- -.es(offset=-1h,index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct').label('last hour'), -.es(index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct').label('current hour') <1> ----------------------------------- - -<1> `.label()` adds custom labels to the visualization. - -[role="screenshot"] -image::images/timelion-create03.png[] -{nbsp} - -[float] -[[time-series-title]] -==== Add a title - -Add a meaningful title: - -[source,text] ----------------------------------- -.es(offset=-1h, - index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct') - .label('last hour'), -.es(index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct') - .label('current hour') - .title('CPU usage over time') <1> ----------------------------------- - -<1> `.title()` adds a title with a meaningful name. Titles make is easier for unfamiliar users to understand the purpose of the visualization. 
- -[role="screenshot"] -image::images/timelion-customize01.png[] -{nbsp} - -[float] -[[time-series-change-chart-type]] -==== Change the chart type - -To differentiate between the current hour data and the last hour data, change the chart type: - -[source,text] ----------------------------------- -.es(offset=-1h, - index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct') - .label('last hour') - .lines(fill=1,width=0.5), <1> -.es(index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct') - .label('current hour') - .title('CPU usage over time') ----------------------------------- - -<1> `.lines()` changes the appearance of the chart lines. In this example, `.lines(fill=1,width=0.5)` sets the fill level to `1`, and the border width to `0.5`. - -[role="screenshot"] -image::images/timelion-customize02.png[] -{nbsp} - -[float] -[[time-series-change-color]] -==== Change the line colors - -To make the current hour data stand out, change the line colors: - -[source,text] ----------------------------------- -.es(offset=-1h, - index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct') - .label('last hour') - .lines(fill=1,width=0.5) - .color(gray), <1> -.es(index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct') - .label('current hour') - .title('CPU usage over time') - .color(#1E90FF) ----------------------------------- - -<1> `.color()` changes the color of the data. Supported color types include standard color names, hexadecimal values, or a color schema for grouped data. In this example, `.color(gray)` represents the last hour, and `.color(#1E90FF)` represents the current hour. - -[role="screenshot"] -image::images/timelion-customize03.png[] -{nbsp} - -[float] -[[time-series-adjust-legend]] -==== Make adjustments to the legend - -Change the position and style of the legend: - -[source,text] ----------------------------------- -.es(offset=-1h, - index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct') - .label('last hour') - .lines(fill=1,width=0.5) - .color(gray), -.es(index=metricbeat-*, - timefield='@timestamp', - metric='avg:system.cpu.user.pct') - .label('current hour') - .title('CPU usage over time') - .color(#1E90FF) - .legend(columns=2, position=nw) <1> ----------------------------------- - -<1> `.legend()` sets the position and style of the legend. In this example, `.legend(columns=2, position=nw)` places the legend in the north west position of the visualization with two columns. - -[role="screenshot"] -image::images/timelion-customize04.png[] -{nbsp} - -[float] -[[mathematical-functions-intro]] -=== Create visualizations with mathematical functions - -To create a visualization for inbound and outbound network traffic, use mathematical functions. 
- -[float] -[[mathematical-functions-define-functions]] -==== Define the functions - -To start tracking the inbound and outbound network traffic, enter the following in the *Timelion Expression* field: - -[source,text] ----------------------------------- -.es(index=metricbeat*, - timefield=@timestamp, - metric=max:system.network.in.bytes) ----------------------------------- - -[role="screenshot"] -image::images/timelion-math01.png[] -{nbsp} - -[float] -[[mathematical-functions-plot-change]] -==== Plot the rate of change - -Change how the data is displayed so that you can easily monitor the inbound traffic: - -[source,text] ----------------------------------- -.es(index=metricbeat*, - timefield=@timestamp, - metric=max:system.network.in.bytes) - .derivative() <1> ----------------------------------- - -<1> `.derivative` plots the change in values over time. - -[role="screenshot"] -image::images/timelion-math02.png[] -{nbsp} - -Add a similar calculation for outbound traffic: - -[source,text] ----------------------------------- -.es(index=metricbeat*, - timefield=@timestamp, - metric=max:system.network.in.bytes) - .derivative(), -.es(index=metricbeat*, - timefield=@timestamp, - metric=max:system.network.out.bytes) - .derivative() - .multiply(-1) <1> ----------------------------------- - -<1> `.multiply()` multiplies the data series by a number, the result of a data series, or a list of data series. For this example, `.multiply(-1)` converts the outbound network traffic to a negative value since the outbound network traffic is leaving your machine. - -[role="screenshot"] -image::images/timelion-math03.png[] -{nbsp} - -[float] -[[mathematical-functions-convert-data]] -==== Change the data metric - -To make the visualization easier to analyze, change the data metric from bytes to megabytes: - -[source,text] ----------------------------------- -.es(index=metricbeat*, - timefield=@timestamp, - metric=max:system.network.in.bytes) - .derivative() - .divide(1048576), -.es(index=metricbeat*, - timefield=@timestamp, - metric=max:system.network.out.bytes) - .derivative() - .multiply(-1) - .divide(1048576) <1> ----------------------------------- - -<1> `.divide()` accepts the same input as `.multiply()`, then divides the data series by the defined divisor. - -[role="screenshot"] -image::images/timelion-math04.png[] -{nbsp} - -[float] -[[mathematical-functions-add-labels]] -==== Customize and format the visualization - -Customize and format the visualization using functions: - -[source,text] ----------------------------------- -.es(index=metricbeat*, - timefield=@timestamp, - metric=max:system.network.in.bytes) - .derivative() - .divide(1048576) - .lines(fill=2, width=1) - .color(green) - .label("Inbound traffic") <1> - .title("Network traffic (MB/s)"), <2> -.es(index=metricbeat*, - timefield=@timestamp, - metric=max:system.network.out.bytes) - .derivative() - .multiply(-1) - .divide(1048576) - .lines(fill=2, width=1) <3> - .color(blue) <4> - .label("Outbound traffic") - .legend(columns=2, position=nw) <5> ----------------------------------- - -<1> `.label()` adds custom labels to the visualization. -<2> `.title()` adds a title with a meaningful name. -<3> `.lines()` changes the appearance of the chart lines. In this example, `.lines(fill=2, width=1)` sets the fill level to `2`, and the border width to `1`. -<4> `.color()` changes the color of the data. Supported color types include standard color names, hexadecimal values, or a color schema for grouped data. 
In this example, `.color(green)` represents the inbound network traffic, and `.color(blue)` represents the outbound network traffic. -<5> `.legend()` sets the position and style of the legend. For this example, `legend(columns=2, position=nw)` places the legend in the north west position of the visualization with two columns. - -[role="screenshot"] -image::images/timelion-math05.png[] -{nbsp} - -[float] -[[timelion-conditional-intro]] -=== Create visualizations with conditional logic and tracking trends - -To easily detect outliers and discover patterns over time, modify time series data with conditional logic and create a trend with a moving average. - -With Timelion conditional logic, you can use the following operator values to compare your data: - -[horizontal] -`eq`:: equal -`ne`:: not equal -`lt`:: less than -`lte`:: less than or equal to -`gt`:: greater than -`gte`:: greater than or equal to - -[float] -[[conditional-define-functions]] -==== Define the functions - -To chart the maximum value of `system.memory.actual.used.bytes`, enter the following in the *Timelion Expression* field: - -[source,text] ----------------------------------- -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes') ----------------------------------- - -[role="screenshot"] -image::images/timelion-conditional01.png[] -{nbsp} - -[float] -[[conditional-track-memory]] -==== Track used memory - -To track the amount of memory used, create two thresholds: - -[source,text] ----------------------------------- -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes'), -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes') - .if(gt, <1> - 11300000000, <2> - .es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes'), - null) - .label('warning') - .color('#FFCC11'), -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes') - .if(gt, - 11375000000, - .es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes'), - null) - .label('severe') - .color('red') ----------------------------------- - -<1> Timelion conditional logic for the _greater than_ operator. In this example, the warning threshold is 11.3GB (`11300000000`), and the severe threshold is 11.375GB (`11375000000`). If the threshold values are too high or low for your machine, adjust the values accordingly. -<2> `if()` compares each point to a number. If the condition evaluates to `true`, adjust the styling. If the condition evaluates to `false`, use the default styling. - -[role="screenshot"] -image::images/timelion-conditional02.png[] -{nbsp} - -[float] -[[conditional-determine-trend]] -==== Determine the trend - -To determine the trend, create a new data series: - -[source,text] ----------------------------------- -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes'), -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes') - .if(gt,11300000000, - .es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes'), - null) - .label('warning') - .color('#FFCC11'), -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes') - .if(gt,11375000000, - .es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes'), - null). 
- label('severe') - .color('red'), -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes') - .mvavg(10) <1> ----------------------------------- - -<1> `mvavg()` calculates the moving average over a specified period of time. In this example, `.mvavg(10)` creates a moving average with a window of 10 data points. - -[role="screenshot"] -image::images/timelion-conditional03.png[] -{nbsp} - -[float] -[[conditional-format-visualization]] -==== Customize and format the visualization - -Customize and format the visualization using functions: - -[source,text] ----------------------------------- -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes') - .label('max memory') <1> - .title('Memory consumption over time'), <2> -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes') - .if(gt, - 11300000000, - .es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes'), - null) - .label('warning') - .color('#FFCC11') <3> - .lines(width=5), <4> -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes') - .if(gt, - 11375000000, - .es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes'), - null) - .label('severe') - .color('red') - .lines(width=5), -.es(index=metricbeat-*, - timefield='@timestamp', - metric='max:system.memory.actual.used.bytes') - .mvavg(10) - .label('mvavg') - .lines(width=2) - .color(#5E5E5E) - .legend(columns=4, position=nw) <5> ----------------------------------- - -<1> `.label()` adds custom labels to the visualization. -<2> `.title()` adds a title with a meaningful name. -<3> `.color()` changes the color of the data. Supported color types include standard color names, hexadecimal values, or a color schema for grouped data. -<4> `.lines()` changes the appearance of the chart lines. In this example, .lines(width=5) sets border width to `5`. -<5> `.legend()` sets the position and style of the legend. For this example, `(columns=4, position=nw)` places the legend in the north west position of the visualization with four columns. - -[role="screenshot"] -image::images/timelion-conditional04.png[] -{nbsp} - -For additional information on Timelion conditional capabilities, go to https://www.elastic.co/blog/timeseries-if-then-else-with-timelion[I have but one .condition()]. - -[float] -[[timelion-deprecation]] -=== Timelion App deprecation - -Deprecated since 7.0, the Timelion app will be removed in 8.0. If you have any Timelion worksheets, you must migrate them to a dashboard. - -NOTE: Only the Timelion app is deprecated. {kib} continues to support Timelion visualizations on dashboards, in Visualize, and in Canvas. - -[float] -[[timelion-app-to-vis]] -==== Create a dashboard from a Timelion worksheet - -To replace a Timelion worksheet with a dashboard, follow the same process for adding a visualization. -In addition, you must migrate the Timelion graphs to Visualize. - -. Open the menu, click **Dashboard**, then click **Create dashboard**. - -. On the dashboard, click **Create New**, then select the Timelion visualization. -+ -[role="screenshot"] -image::images/timelion-create-new-dashboard.png[] -+ -The only thing you need is the Timelion expression for each graph. - -. Open the Timelion app on a new tab, select the chart you want to copy, and copy its expression. -+ -[role="screenshot"] -image::images/timelion-copy-expression.png[] - -. 
Return to the other tab and paste the copied expression to the *Timelion Expression* field and click **Update**. -+ -[role="screenshot"] -image::images/timelion-vis-paste-expression.png[] - -. Save the new visualization, give it a name, and click **Save and Return**. -+ -Your Timelion visualization will appear on the dashboard. Repeat this for all your charts on each worksheet. -+ -[role="screenshot"] -image::images/timelion-dashboard.png[] diff --git a/docs/visualize/tsvb.asciidoc b/docs/visualize/tsvb.asciidoc deleted file mode 100644 index 9a1e81670b654..0000000000000 --- a/docs/visualize/tsvb.asciidoc +++ /dev/null @@ -1,138 +0,0 @@ -[[TSVB]] -== TSVB - -TSVB is a time series data visualizer that allows you to use the full power of the -Elasticsearch aggregation framework. With TSVB, you can combine an infinite -number of aggregations to display complex data. - -NOTE: In Elasticsearch version 7.3.0 and later, the time series data visualizer is now referred to as TSVB instead of Time Series Visual Builder. - -[float] -[[tsvb-visualization-types]] -=== Types of TSVB visualizations - -TSVB comes with these types of visualizations: - -Time Series:: A histogram visualization that supports area, line, bar, and steps along with multiple y-axis. - -[role="screenshot"] -image:images/tsvb-screenshot.png["Time series visualization"] - -Metric:: A metric that displays the latest number in a data series. - -[role="screenshot"] -image:images/tsvb-metric.png["Metric visualization"] - -Top N:: A horizontal bar chart where the y-axis is based on a series of metrics, and the x-axis is the latest value in the series. - -[role="screenshot"] -image:images/tsvb-top-n.png["Top N visualization"] - -Gauge:: A single value gauge visualization based on the latest value in a series. - -[role="screenshot"] -image:images/tsvb-gauge.png["Gauge visualization"] - -Markdown:: Edit the data using using Markdown text and Mustache template syntax. - -[role="screenshot"] -image:images/tsvb-markdown.png["Markdown visualization"] - -Table:: Display data from multiple time series by defining the field group to show in the rows, and the columns of data to display. - -[role="screenshot"] -image:images/tsvb-table.png["Table visualization"] - -[float] -[[create-tsvb-visualization]] -=== Create TSVB visualizations - -To create a TSVB visualization, choose the data series you want to display, then choose how you want to display the data. The options available are dependent on the visualization. - -[float] -[[tsvb-data-series-options]] -==== Configure the data series - -To create a single metric, add multiple data series with multiple aggregations. - -. Select the visualization type. - -. Specify the data series labels and colors. - -.. Select *Data*. -+ -If you are using the *Table* visualization, select *Columns*. - -.. In the *Label* field, enter a name for the data series, which is used on legends and titles. -+ -For series that are grouped by a term, you can specify a mustache variable of `{{key}}` to substitute the term. - -.. If supported by the visualization, click the swatch and choose a color for the data series. - -.. To add another data series, click *+*, then repeat the steps to specify the labels and colors. - -. Specify the data series metrics. - -.. Select *Metrics*. - -.. From the dropdown lists, choose your options. - -.. To add another metric, click *+*. -+ -When you add more than one metric, the last metric value is displayed, which is indicated by the eye icon. - -. 
To specify the format and display options, select *Options*. - -. To specify how to group or split the data, choose an option from the *Group by* drop down list. -+ -By default, the data series are grouped by everything. - -[float] -[[tsvb-panel-options]] -==== Configure the panel - -Change the data that you want to display and choose the style options for the panel. - -. Select *Panel options*. - -. Under *Data*, specify how much of the data that you want to display in the visualization. - -. Under *Style*, specify how you want the visualization to look. - -[float] -[[tsvb-add-annotations]] -==== Add annotations - -If you are using the Time Series visualization, add annotation data sources. - -. Select *Annotations*. - -. Click *Add data source*, then specify the options. - -[float] -[[tsvb-enter-markdown]] -==== Enter Markdown text - -Edit the source for the Markdown visualization. - -. Select *Markdown*. - -. In the editor, enter enter your Markdown text, then press Enter. - -. To insert the mustache template variable into the editor, click the variable name. -+ -The http://mustache.github.io/mustache.5.html[mustache syntax] uses the Handlebar.js processor, which is an extended version of the Mustache template language. - -[float] -[[tsvb-style-markdown]] -==== Style Markdown text - -Style your Markdown visualization using http://lesscss.org/features/[less syntax]. - -. Select *Markdown*. - -. Select *Panel options*. - -. Enter styling rules in *Custom CSS* section -+ -Less in TSVB does not support custom plugins or inline JavaScript. diff --git a/docs/visualize/vega.asciidoc b/docs/visualize/vega.asciidoc deleted file mode 100644 index b231159e86bde..0000000000000 --- a/docs/visualize/vega.asciidoc +++ /dev/null @@ -1,1639 +0,0 @@ -[[vega-graph]] -== Vega - -Build custom visualizations using Vega and Vega-Lite, backed by one or more -data sources including {es}, Elastic Map Service, URL, -or static data. Use the {kib} extensions to Vega to embed Vega into -your dashboard, and to add interactivity to the visualizations. - -Vega and Vega-Lite are both declarative formats to create visualizations -using JSON. Both use a different syntax for declaring visualizations, -and are not fully interchangeable. - -[float] -[[when-to-vega]] -=== When to use Vega - -Vega and Vega-Lite are capable of building most of the visualizations -that {kib} provides, but with higher complexity. The most common reason -to use Vega in {kib} is that {kib} is missing support for the query or -visualization, for example: - -* Aggregations using the `nested` or `parent/child` mapping -* Aggregations without a {kib} index pattern -* Queries using custom time filters -* Complex calculations -* Extracting data from _source instead of aggregation -* Scatter charts -* Sankey charts -* Custom maps -* Using a visual theme that {kib} does not provide - -[[vega-lite-tutorial]] -=== Tutorial: First visualization in Vega-Lite - -In this tutorial, you will learn about how to edit Vega-Lite in {kib} to create -a stacked area chart from an {es} search query. It will give you a starting point -for a more comprehensive -https://vega.github.io/vega-lite/tutorials/getting_started.html[introduction to Vega-Lite], -while only covering the basics. - -In this tutorial, you will build a stacked area chart from one of the {kib} sample data -sets. - -[role="screenshot"] -image::visualize/images/vega_lite_tutorial_1.png[] - -Before beginning this tutorial, install the <> -set. 
- -When you first open the Vega editor in {kib}, you will see a pre-populated -line chart which shows the total number of documents across all your indices -within the time range. - -[role="screenshot"] -image::visualize/images/vega_lite_default.png[] - -The text editor contains a Vega-Lite spec written in https://hjson.github.io/[HJSON], -which is similar to JSON but optimized for human editing. HJSON supports: - -* Comments using // or /* syntax -* Object keys without quotes -* String values without quotes -* Optional commas -* Double or single quotes -* Multiline strings - -[float] -==== Small steps - -Always work on Vega in the smallest steps possible, and save your work frequently. -Small changes will cause unexpected results. Click the "Save" button now. - -The first step is to change the index to one of the <> -sets. Change - -```yaml -index: _all -``` - -to: - -```yaml -index: kibana_sample_data_ecommerce -``` - -Click "Update". The result is probably not what you expect. You should see a flat -line with 0 results. - -You've only changed the index, so the difference must be the query is returning -no results. You can try the <>, -but intuition may be faster for this particular problem. - -In this case, the problem is that you are querying the field `@timestamp`, -which does not exist in the `kibana_sample_data_ecommerce` data. Find and replace -`@timestamp` with `order_date`. This fixes the problem, leaving you with this spec: - -.Expand Vega-Lite spec -[%collapsible%closed] -==== -[source,yaml] ----- -{ - $schema: https://vega.github.io/schema/vega-lite/v4.json - title: Event counts from ecommerce - data: { - url: { - %context%: true - %timefield%: order_date - index: kibana_sample_data_ecommerce - body: { - aggs: { - time_buckets: { - date_histogram: { - field: order_date - interval: {%autointerval%: true} - extended_bounds: { - min: {%timefilter%: "min"} - max: {%timefilter%: "max"} - } - min_doc_count: 0 - } - } - } - size: 0 - } - } - format: {property: "aggregations.time_buckets.buckets" } - } - - mark: line - - encoding: { - x: { - field: key - type: temporal - axis: { title: null } - } - y: { - field: doc_count - type: quantitative - axis: { title: "Document count" } - } - } -} ----- - -==== - -Now, let's make the visualization more interesting by adding another aggregation -to create a stacked area chart. To verify that you have constructed the right -query, it is easiest to use the {kib} Dev Tools in a separate tab from the -Vega editor. Open the Dev Tools from the Management section of the navigation. - -This query is roughly equivalent to the one that is used in the default -Vega-Lite spec. 
Copy it into the Dev Tools: - -```js -POST kibana_sample_data_ecommerce/_search -{ - "query": { - "range": { - "order_date": { - "gte": "now-7d" - } - } - }, - "aggs": { - "time_buckets": { - "date_histogram": { - "field": "order_date", - "fixed_interval": "1d", - "extended_bounds": { - "min": "now-7d" - }, - "min_doc_count": 0 - } - } - }, - "size": 0 -} -``` - -There's not enough data to create a stacked bar in the original query, so we -will add a new -{ref}/search-aggregations-bucket-terms-aggregation.html[terms aggregation]: - -```js -POST kibana_sample_data_ecommerce/_search -{ - "query": { - "range": { - "order_date": { - "gte": "now-7d" - } - } - }, - "aggs": { - "categories": { - "terms": { "field": "category.keyword" }, - "aggs": { - "time_buckets": { - "date_histogram": { - "field": "order_date", - "fixed_interval": "1d", - "extended_bounds": { - "min": "now-7d" - }, - "min_doc_count": 0 - } - } - } - } - }, - "size": 0 -} -``` - -You'll see that the response format looks different from the previous query: - -```json -{ - "aggregations" : { - "categories" : { - "doc_count_error_upper_bound" : 0, - "sum_other_doc_count" : 0, - "buckets" : [{ - "key" : "Men's Clothing", - "doc_count" : 1661, - "time_buckets" : { - "buckets" : [{ - "key_as_string" : "2020-06-30T00:00:00.000Z", - "key" : 1593475200000, - "doc_count" : 19 - }, { - "key_as_string" : "2020-07-01T00:00:00.000Z", - "key" : 1593561600000, - "doc_count" : 71 - }] - } - }] - } - } -} -``` - -Now that we have data that we're happy with, it's time to convert from an -isolated {es} query into a query with {kib} integration. Looking at the -<>, you will -see the full list of special tokens that are used in this query, such -as `%context: true`. This query has also replaced `"fixed_interval": "1d"` -with `interval: {%autointerval%: true}`. Copy the final query into -your spec: - -```yaml - data: { - url: { - %context%: true - %timefield%: order_date - index: kibana_sample_data_ecommerce - body: { - aggs: { - categories: { - terms: { field: "category.keyword" } - aggs: { - time_buckets: { - date_histogram: { - field: order_date - interval: {%autointerval%: true} - extended_bounds: { - min: {%timefilter%: "min"} - max: {%timefilter%: "max"} - } - min_doc_count: 0 - } - } - } - } - } - size: 0 - } - } - format: {property: "aggregations.categories.buckets" } - } -``` - -If you copy and paste that into your Vega-Lite spec, and click "Update", -you will see a warning saying `Infinite extent for field "key": [Infinity, -Infinity]`. -Let's use our <> to understand why. - -Vega-Lite generates data using the names `source_0` and `data_0`. `source_0` contains -the results from the {es} query, and `data_0` contains the visually encoded results -which are shown in the chart. To debug this problem, you need to compare both. - -To look at the source, open the browser dev tools console and type -`VEGA_DEBUG.view.data('source_0')`. You will see: - -```js -[{ - doc_count: 454 - key: "Men's Clothing" - time_buckets: {buckets: Array(57)} - Symbol(vega_id): 12822 -}, ...] -``` - -To compare to the visually encoded data, open the browser dev tools console and type -`VEGA_DEBUG.view.data('data_0')`. You will see: - -```js -[{ - doc_count: 454 - key: NaN - time_buckets: {buckets: Array(57)} - Symbol(vega_id): 13879 -}] -``` - -The issue seems to be that the `key` property is not being converted the right way, -which makes sense because the `key` is now `Men's Clothing` instead of a timestamp. 
- -To fix this, try updating the `encoding` of your Vega-Lite spec to: - -```yaml - encoding: { - x: { - field: time_buckets.buckets.key - type: temporal - axis: { title: null } - } - y: { - field: time_buckets.buckets.doc_count - type: quantitative - axis: { title: "Document count" } - } - } -``` - -This will show more errors, and you can inspect `VEGA_DEBUG.view.data('data_0')` to -understand why. This now shows: - -```js -[{ - doc_count: 454 - key: "Men's Clothing" - time_buckets: {buckets: Array(57)} - time_buckets.buckets.doc_count: undefined - time_buckets.buckets.key: null - Symbol(vega_id): 14094 -}] -``` - -It looks like the problem is that the `time_buckets` inner array is not being -extracted by Vega. The solution is to use a Vega-lite -https://vega.github.io/vega-lite/docs/flatten.html[flatten transformation], available in {kib} 7.9 and later. -If using an older version of Kibana, the flatten transformation is available in Vega -but not Vega-Lite. - -Add this section in between the `data` and `encoding` section: - -```yaml - transform: [{ - flatten: ["time_buckets.buckets"] - }] -``` - -This does not yet produce the results you expect. Inspect the transformed data -by typing `VEGA_DEBUG.view.data('data_0')` into the console again: - -```js -[{ - doc_count: 453 - key: "Men's Clothing" - time_bucket.buckets.doc_count: undefined - time_buckets: {buckets: Array(57)} - time_buckets.buckets: { - key_as_string: "2020-06-30T15:00:00.000Z", - key: 1593529200000, - doc_count: 2 - } - time_buckets.buckets.key: null - Symbol(vega_id): 21564 -}] -``` - -The debug view shows `undefined` values where you would expect to see numbers, and -the cause is that there are duplicate names which are confusing Vega-Lite. This can -be fixed by making this change to the `transform` and `encoding` blocks: - -```yaml - transform: [{ - flatten: ["time_buckets.buckets"], - as: ["buckets"] - }] - - mark: area - - encoding: { - x: { - field: buckets.key - type: temporal - axis: { title: null } - } - y: { - field: buckets.doc_count - type: quantitative - axis: { title: "Document count" } - } - color: { - field: key - type: nominal - } - } -``` - -At this point, you have a stacked area chart that shows the top categories, -but the chart is still missing some common features that we expect from a {kib} -visualization. Let's add hover states and tooltips next. - -Hover states are handled differently in Vega-Lite and Vega. In Vega-Lite this is -done using a concept called `selection`, which has many permutations that are not -covered in this tutorial. We will be adding a simple tooltip and hover state. - -Because {kib} has enabled the https://vega.github.io/vega-lite/docs/tooltip.html[Vega tooltip plugin], -tooltips can be defined in several ways: - -* Automatic tooltip based on the data, via `{ content: "data" }` -* Array of fields, like `[{ field: "key", type: "nominal" }]` -* Defining a custom Javascript object using the `calculate` transform - -For the simple tooltip, add this to your encoding: - -```yaml - encoding: { - tooltip: [{ - field: buckets.key - type: temporal - title: "Date" - }, { - field: key - type: nominal - title: "Category" - }, { - field: buckets.doc_count - type: quantitative - title: "Count" - }] - } -``` - -As you hover over the area series in your chart, a multi-line tooltip will -appear, but it won't indicate the nearest point that it's pointing to. To -indicate the nearest point, we need to add a second layer. - -The first step is to remove the `mark: area` from your visualization. 
-Once you've removed the previous mark, add a composite mark at the end of -the Vega-Lite spec: - -```yaml - layer: [{ - mark: area - }, { - mark: point - }] -``` - -You'll see that the points are not appearing to line up with the area chart, -and the reason is that the points are not being stacked. Change your Y encoding -to this: - -```yaml - y: { - field: buckets.doc_count - type: quantitative - axis: { title: "Document count" } - stack: true - } -``` - -Now, we will add a `selection` block inside the point mark: - -```yaml - layer: [{ - mark: area - }, { - mark: point - - selection: { - pointhover: { - type: single - on: mouseover - clear: mouseout - empty: none - fields: ["buckets.key", "key"] - nearest: true - } - } - - encoding: { - size: { - condition: { - selection: pointhover - value: 100 - } - value: 5 - } - fill: { - condition: { - selection: pointhover - value: white - } - } - } - }] -``` - -Now that you've enabled a selection, try moving the mouse around the visualization -and seeing the points respond to the nearest position: - -[role="screenshot"] -image::visualize/images/vega_lite_tutorial_2.png[] - -The final result of this tutorial is this spec: - -.Expand final Vega-Lite spec -[%collapsible%closed] -==== -[source,yaml] ----- -{ - $schema: https://vega.github.io/schema/vega-lite/v4.json - title: Event counts from ecommerce - data: { - url: { - %context%: true - %timefield%: order_date - index: kibana_sample_data_ecommerce - body: { - aggs: { - categories: { - terms: { field: "category.keyword" } - aggs: { - time_buckets: { - date_histogram: { - field: order_date - interval: {%autointerval%: true} - extended_bounds: { - min: {%timefilter%: "min"} - max: {%timefilter%: "max"} - } - min_doc_count: 0 - } - } - } - } - } - size: 0 - } - } - format: {property: "aggregations.categories.buckets" } - } - - transform: [{ - flatten: ["time_buckets.buckets"] - as: ["buckets"] - }] - - encoding: { - x: { - field: buckets.key - type: temporal - axis: { title: null } - } - y: { - field: buckets.doc_count - type: quantitative - axis: { title: "Document count" } - stack: true - } - color: { - field: key - type: nominal - title: "Category" - } - tooltip: [{ - field: buckets.key - type: temporal - title: "Date" - }, { - field: key - type: nominal - title: "Category" - }, { - field: buckets.doc_count - type: quantitative - title: "Count" - }] - } - - layer: [{ - mark: area - }, { - mark: point - - selection: { - pointhover: { - type: single - on: mouseover - clear: mouseout - empty: none - fields: ["buckets.key", "key"] - nearest: true - } - } - - encoding: { - size: { - condition: { - selection: pointhover - value: 100 - } - value: 5 - } - fill: { - condition: { - selection: pointhover - value: white - } - } - } - }] -} ----- - -==== - -[[vega-tutorial]] -=== Tutorial: Updating {kib} filters from Vega - -In this tutorial you will build an area chart in Vega using an {es} search query, -and add a click handler and drag handler to update {kib} filters. -This tutorial is not a full https://vega.github.io/vega/tutorials/[Vega tutorial], -but will cover the basics of creating Vega visualizations into {kib}. 
- -First, create an almost-blank Vega chart by pasting this into the editor: - -```yaml -{ - $schema: "https://vega.github.io/schema/vega/v5.json" - data: [{ - name: source_0 - }] - - scales: [{ - name: x - type: time - range: width - }, { - name: y - type: linear - range: height - }] - - axes: [{ - orient: bottom - scale: x - }, { - orient: left - scale: y - }] - - marks: [ - { - type: area - from: { - data: source_0 - } - encode: { - update: { - } - } - } - ] -} -``` - -Despite being almost blank, this Vega spec still contains the minimum requirements: - -* Data -* Scales -* Marks -* (optional) Axes - -Next, add a valid {es} search query in the `data` block: - -```yaml - data: [ - { - name: source_0 - url: { - %context%: true - %timefield%: order_date - index: kibana_sample_data_ecommerce - body: { - aggs: { - time_buckets: { - date_histogram: { - field: order_date - fixed_interval: "3h" - extended_bounds: { - min: {%timefilter%: "min"} - max: {%timefilter%: "max"} - } - min_doc_count: 0 - } - } - } - size: 0 - } - } - format: { property: "aggregations.time_buckets.buckets" } - } - ] -``` - -Click "Update", and nothing will change in the visualization. The first step -is to change the X and Y scales based on the data: - -```yaml - scales: [{ - name: x - type: time - range: width - domain: { - data: source_0 - field: key - } - }, { - name: y - type: linear - range: height - domain: { - data: source_0 - field: doc_count - } - }] -``` - -Click "Update", and you will see that the X and Y axes are now showing labels based -on the real data. - -Next, encode the fields `key` and `doc_count` as the X and Y values: - -```yaml - marks: [ - { - type: area - from: { - data: source_0 - } - encode: { - update: { - x: { - scale: x - field: key - } - y: { - scale: y - value: 0 - } - y2: { - scale: y - field: doc_count - } - } - } - } - ] -``` - -Click "Update" and you will get a basic area chart: - -[role="screenshot"] -image::visualize/images/vega_tutorial_3.png[] - -Next, add a new block to the `marks` section. This will show clickable points to filter for a specific -date: - -```yaml - { - name: point - type: symbol - style: ["point"] - from: { - data: source_0 - } - encode: { - update: { - x: { - scale: x - field: key - } - y: { - scale: y - field: doc_count - } - size: { - value: 100 - } - fill: { - value: black - } - } - } - } -``` - -Next, we will create a Vega signal to make the points clickable. You can access -the clicked `datum` in the expression used to update. In this case, you want -clicks on points to add a time filter with the 3-hour interval defined above. - -```yaml - signals: [ - { - name: point_click - on: [{ - events: { - source: scope - type: click - markname: point - } - update: '''kibanaSetTimeFilter(datum.key, datum.key + 3 * 60 * 60 * 1000)''' - }] - } - ] -``` - -This event is using the {kib} custom function `kibanaSetTimeFilter` to generate a filter that -gets applied to the entire dashboard on click. - -The mouse cursor does not currently indicate that the chart is interactive. Find the `marks` section, -and update the mark named `point` by adding `cursor: { value: "pointer" }` to -the `encoding` section like this: - -```yaml - { - name: point - type: symbol - style: ["point"] - from: { - data: source_0 - } - encode: { - update: { - ... - cursor: { value: "pointer" } - } - } - } -``` - -Next, we will add a drag interaction which will allow the user to narrow into -a specific time range in the visualization. 
This will require adding more signals, and -adding a rectangle overlay: - -[role="screenshot"] -image::visualize/images/vega_tutorial_4.png[] - -The first step is to add a new `signal` to track the X position of the cursor: - -```yaml - { - name: currentX - value: -1 - on: [{ - events: { - type: mousemove - source: view - }, - update: "clamp(x(), 0, width)" - }, { - events: { - type: mouseout - source: view - } - update: "-1" - }] - } -``` - -Now add a new `mark` to indicate the current cursor position: - -```yaml - { - type: rule - interactive: false - encode: { - update: { - y: {value: 0} - y2: {signal: "height"} - stroke: {value: "gray"} - strokeDash: { - value: [2, 1] - } - x: {signal: "max(currentX,0)"} - defined: {signal: "currentX > 0"} - } - } - } -``` - -Next, add a signal to track the current selected range, which will update -until the user releases the mouse button or uses the escape key: - - -```yaml - { - name: selected - value: [0, 0] - on: [{ - events: { - type: mousedown - source: view - } - update: "[clamp(x(), 0, width), clamp(x(), 0, width)]" - }, { - events: { - type: mousemove - source: window - consume: true - between: [{ - type: mousedown - source: view - }, { - merge: [{ - type: mouseup - source: window - }, { - type: keydown - source: window - filter: "event.key === 'Escape'" - }] - }] - } - update: "[selected[0], clamp(x(), 0, width)]" - }, { - events: { - type: keydown - source: window - filter: "event.key === 'Escape'" - } - update: "[0, 0]" - }] - } -``` - -Now that there is a signal which tracks the time range from the user, we need to indicate -the range visually by adding a new mark which only appears conditionally: - -```yaml - { - type: rect - name: selectedRect - encode: { - update: { - height: {signal: "height"} - fill: {value: "#333"} - fillOpacity: {value: 0.2} - x: {signal: "selected[0]"} - x2: {signal: "selected[1]"} - defined: {signal: "selected[0] !== selected[1]"} - } - } - } -``` - -Finally, add a new signal which will update the {kib} time filter when the mouse is released while -dragging: - -```yaml - { - name: applyTimeFilter - value: null - on: [{ - events: { - type: mouseup - source: view - } - update: '''selected[0] !== selected[1] ? kibanaSetTimeFilter( - invert('x',selected[0]), - invert('x',selected[1])) : null''' - }] - } -``` - -Putting this all together, your visualization now supports the main features of -standard visualizations in {kib}, but with the potential to add even more control. 
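If you want to confirm that the signals are wired correctly before relying on the {kib} filter, you can read their current values from the browser console. This is an optional check: it assumes the `VEGA_DEBUG` variable described later in the browser debugging console section, and uses the standard Vega View API (`view.signal()`); the sample values in the comments are only illustrative.

```js
// Read the drag-selection signals at runtime (values are pixel coordinates).
VEGA_DEBUG.view.signal('currentX')   // -1 while the cursor is outside the chart
VEGA_DEBUG.view.signal('selected')   // for example [134, 402] right after a drag
```

If both values update while you move and drag the mouse, the `applyTimeFilter` signal defined above should receive the same range on mouse up and update the dashboard time filter.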
-The final Vega spec for this tutorial is here: - -.Expand final Vega spec -[%collapsible%closed] -==== -[source,yaml] ----- -{ - $schema: "https://vega.github.io/schema/vega/v5.json" - data: [ - { - name: source_0 - url: { - %context%: true - %timefield%: order_date - index: kibana_sample_data_ecommerce - body: { - aggs: { - time_buckets: { - date_histogram: { - field: order_date - fixed_interval: "3h" - extended_bounds: { - min: {%timefilter%: "min"} - max: {%timefilter%: "max"} - } - min_doc_count: 0 - } - } - } - size: 0 - } - } - format: { property: "aggregations.time_buckets.buckets" } - } - ] - - scales: [{ - name: x - type: time - range: width - domain: { - data: source_0 - field: key - } - }, { - name: y - type: linear - range: height - domain: { - data: source_0 - field: doc_count - } - }] - - axes: [{ - orient: bottom - scale: x - }, { - orient: left - scale: y - }] - - marks: [ - { - type: area - from: { - data: source_0 - } - encode: { - update: { - x: { - scale: x - field: key - } - y: { - scale: y - value: 0 - } - y2: { - scale: y - field: doc_count - } - } - } - }, - { - name: point - type: symbol - style: ["point"] - from: { - data: source_0 - } - encode: { - update: { - x: { - scale: x - field: key - } - y: { - scale: y - field: doc_count - } - size: { - value: 100 - } - fill: { - value: black - } - cursor: { value: "pointer" } - } - } - }, - { - type: rule - interactive: false - encode: { - update: { - y: {value: 0} - y2: {signal: "height"} - stroke: {value: "gray"} - strokeDash: { - value: [2, 1] - } - x: {signal: "max(currentX,0)"} - defined: {signal: "currentX > 0"} - } - } - }, - { - type: rect - name: selectedRect - encode: { - update: { - height: {signal: "height"} - fill: {value: "#333"} - fillOpacity: {value: 0.2} - x: {signal: "selected[0]"} - x2: {signal: "selected[1]"} - defined: {signal: "selected[0] !== selected[1]"} - } - } - } - ] - - signals: [ - { - name: point_click - on: [{ - events: { - source: scope - type: click - markname: point - } - update: '''kibanaSetTimeFilter(datum.key, datum.key + 3 * 60 * 60 * 1000)''' - }] - } - { - name: currentX - value: -1 - on: [{ - events: { - type: mousemove - source: view - }, - update: "clamp(x(), 0, width)" - }, { - events: { - type: mouseout - source: view - } - update: "-1" - }] - } - { - name: selected - value: [0, 0] - on: [{ - events: { - type: mousedown - source: view - } - update: "[clamp(x(), 0, width), clamp(x(), 0, width)]" - }, { - events: { - type: mousemove - source: window - consume: true - between: [{ - type: mousedown - source: view - }, { - merge: [{ - type: mouseup - source: window - }, { - type: keydown - source: window - filter: "event.key === 'Escape'" - }] - }] - } - update: "[selected[0], clamp(x(), 0, width)]" - }, { - events: { - type: keydown - source: window - filter: "event.key === 'Escape'" - } - update: "[0, 0]" - }] - } - { - name: applyTimeFilter - value: null - on: [{ - events: { - type: mouseup - source: view - } - update: '''selected[0] !== selected[1] ? 
kibanaSetTimeFilter( - invert('x',selected[0]), - invert('x',selected[1])) : null''' - }] - } - ] -} - ----- -==== - -[[vega-reference]] -=== Reference for {kib} extensions - -{kib} has extended Vega and Vega-Lite with extensions that support: - -* Default height and width -* Default theme to match {kib} -* Writing {es} queries using the time range and filters from dashboards -* Using the Elastic Map Service in Vega maps -* Additional tooltip styling -* Advanced setting to enable URL loading from any domain -* Limited debugging support using the browser dev tools -* (Vega only) Expression functions which can update the time range and dashboard filters - -[[vega-sizing-and-positioning]] -==== Default height and width - -By default, Vega visualizations use the `autosize = { type: 'fit', contains: 'padding' }` layout. -`fit` uses all available space, ignores `width` and `height` values, -and respects the padding values. To override this behavior, change the -`autosize` value. - -[[vega-theme]] -==== Default theme to match {kib} - -{kib} registers a default https://vega.github.io/vega/docs/schemes/[Vega color scheme] -with the id `elastic`, and sets a default color for each `mark` type. -Override it by providing a different `stroke`, `fill`, or `color` (Vega-Lite) value. - -[[vega-queries]] -==== Writing {es} queries in Vega - -{kib} extends the Vega https://vega.github.io/vega/docs/data/[data] elements -with support for direct {es} queries specified as a `url`. - -Because of this, {kib} is **unable to support dynamically loaded data**, -which would otherwise work in Vega. All data is fetched before it's passed to -the Vega renderer. - -To define an {es} query in Vega, set the `url` to an object. {kib} will parse -the object looking for special tokens that allow your query to integrate with {kib}. -These tokens are: - -* `%context%: true`: Set at the top level, and replaces the `query` section with filters from dashboard -* `%timefield%: `: Set at the top level, integrates the query with the dashboard time filter -* `{%timefilter%: true}`: Replaced by an {es} range query with upper and lower bounds -* `{%timefilter%: "min" | "max"}`: Replaced only by the upper or lower bounds -* `{%timefilter: true, shift: -1, unit: 'hour'}`: Generates a time range query one hour in the past -* `{%autointerval%: true}`: Replaced by the string which contains the automatic {kib} time interval, such as `1h` -* `{%autointerval%: 10}`: Replaced by a string which is approximately dividing the time into 10 ranges, allowing - you to influence the automatic interval -* `"%dashboard_context-must_clause%"`: String replaced by object containing filters -* `"%dashboard_context-filter_clause%"`: String replaced by an object containing filters -* `"%dashboard_context-must_not_clause%"`: String replaced by an object containing filters - -Putting this together, an example query that counts the number of documents in -a specific index: - -[source,yaml] ----- -// An object instead of a string for the URL value -// is treated as a context-aware Elasticsearch query. -url: { - // Specify the time filter. 
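  // (%timefield% must name a date field in the index below; the dashboard
  // time picker is applied to that field)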
- %timefield%: @timestamp - // Apply dashboard context filters when set - %context%: true - - // Which indexes to search - index: kibana_sample_data_logs - // The body element may contain "aggs" and "query" keys - body: { - aggs: { - time_buckets: { - date_histogram: { - // Use date histogram aggregation on @timestamp field - field: @timestamp <1> - // interval value will depend on the time filter - // Use an integer to set approximate bucket count - interval: { %autointerval%: true } - // Make sure we get an entire range, even if it has no data - extended_bounds: { - min: { %timefilter%: "min" } - max: { %timefilter%: "max" } - } - // Use this for linear (e.g. line, area) graphs - // Without it, empty buckets will not show up - min_doc_count: 0 - } - } - } - // Speed up the response by only including aggregation results - size: 0 - } -} ----- - -<1> `@timestamp` — Filters the time range and breaks it into histogram -buckets. - -The full result includes the following structure: - -[source,yaml] ----- -{ - "aggregations": { - "time_buckets": { - "buckets": [{ - "key_as_string": "2015-11-30T22:00:00.000Z", - "key": 1448920800000,<1> - "doc_count": 28 - }, { - "key_as_string": "2015-11-30T23:00:00.000Z", - "key": 1448924400000, <1> - "doc_count": 330 - }, ... ----- - -<1> `"key"` — The unix timestamp you can use without conversions by the -Vega date expressions. - -For most visualizations, you only need the list of bucket values. To focus on -only the data you need, use `format: {property: "aggregations.time_buckets.buckets"}`. - -Specify a query with individual range and dashboard context. The query is -equivalent to `"%context%": true, "%timefield%": "@timestamp"`, -except that the time range is shifted back by 10 minutes: - -[source,yaml] ----- -{ - body: { - query: { - bool: { - must: [ - // This string will be replaced - // with the auto-generated "MUST" clause - "%dashboard_context-must_clause%" - { - range: { - // apply timefilter (upper right corner) - // to the @timestamp variable - @timestamp: { - // "%timefilter%" will be replaced with - // the current values of the time filter - // (from the upper right corner) - "%timefilter%": true - // Only work with %timefilter% - // Shift current timefilter by 10 units back - shift: 10 - // week, day (default), hour, minute, second - unit: minute - } - } - } - ] - must_not: [ - // This string will be replaced with - // the auto-generated "MUST-NOT" clause - "%dashboard_context-must_not_clause%" - ] - filter: [ - // This string will be replaced - // with the auto-generated "FILTER" clause - "%dashboard_context-filter_clause%" - ] - } - } - } -} ----- - -NOTE: When using `"%context%": true` or defining a value for `"%timefield%"` the body cannot contain a query. To customize the query within the VEGA specification (e.g. add an additional filter, or shift the timefilter), define your query and use the placeholders as in the example above. The placeholders will be replaced by the actual context of the dashboard or visualization once parsed. - -The `"%timefilter%"` can also be used to specify a single min or max -value. The date_histogram's `extended_bounds` can be set -with two values - min and max. Instead of hardcoding a value, you may -use `"min": {"%timefilter%": "min"}`, which will be replaced with the -beginning of the current time range. The `shift` and `unit` values are -also supported. 
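For example, here is a minimal sketch of an `extended_bounds` block built entirely from the dashboard time range; the `shift` and `unit` keys are optional and behave as described above:

[source,yaml]
----
extended_bounds: {
  // lower bound from the dashboard time filter, shifted by one hour
  min: {%timefilter%: "min", shift: 1, unit: "hour"}
  // upper bound from the dashboard time filter, unmodified
  max: {%timefilter%: "max"}
}
----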
The `"interval"` can also be set dynamically, depending -on the currently picked range: `"interval": {"%autointerval%": 10}` will -try to get about 10-15 data points (buckets). - -[float] -[[vega-esmfiles]] -=== Access Elastic Map Service files - -Access the Elastic Map Service files via the same mechanism: - -[source,yaml] ----- -url: { - // "type" defaults to "elasticsearch" otherwise - type: emsfile - // Name of the file, exactly as in the Region map visualization - name: World Countries -} -// The result is a geojson file, get its features to use -// this data source with the "shape" marks -// https://vega.github.io/vega/docs/marks/shape/ -format: {property: "features"} ----- - -To enable Maps, the graph must specify `type=map` in the host -configuration: - -[source,yaml] ----- -{ - "config": { - "kibana": { - "type": "map", - - // Initial map position - "latitude": 40.7, // default 0 - "longitude": -74, // default 0 - "zoom": 7, // default 2 - - // defaults to "default". Use false to disable base layer. - "mapStyle": false, - - // default 0 - "minZoom": 5, - - // defaults to the maximum for the given style, - // or 25 when base is disabled - "maxZoom": 13, - - // defaults to true, shows +/- buttons to zoom in/out - "zoomControl": false, - - // Defaults to 'false', disables mouse wheel zoom. If set to - // 'true', map may zoom unexpectedly while scrolling dashboard - "scrollWheelZoom": false, - - // When false, repaints on each move frame. - // Makes the graph slower when moving the map - "delayRepaint": true, // default true - } - }, - /* the rest of Vega JSON */ -} ----- - -The visualization automatically injects a `"projection"`, which you can use to -calculate the position of all geo-aware marks. -Additionally, you can use `latitude`, `longitude`, and `zoom` signals. -These signals can be used in the graph, or can be updated to modify the -position of the map. - -[float] -[[vega-tooltip]] -==== Additional tooltip styling - -{kib} has installed the https://vega.github.io/vega-lite/docs/tooltip.html[Vega tooltip plugin], -so tooltips can be defined in the ways documented there. Beyond that, {kib} also supports -a configuration option for changing the tooltip position and padding: - -```js -{ - config: { - kibana: { - tooltips: { - position: 'top', - padding: 15 - } - } - } -} -``` - -[[vega-url-loading]] -==== Advanced setting to enable URL loading from any domain - -Vega can load data from any URL, but this is disabled by default in {kib}. -To change this, set `vis_type_vega.enableExternalUrls: true` in `kibana.yml`, -then restart {kib}. - -[[vega-inspector]] -==== Vega Inspector -Use the contextual *Inspect* tool to gain insights into different elements. -For Vega visualizations, there are two different views: *Request* and *Vega debug*. - -===== Inspect Elasticsearch requests - -Vega uses the {ref}/search-search.html[{es} search API] to get documents and aggregation -results from {es}. To troubleshoot these requests, click *Inspect*, which shows the most recent requests. -In case your specification has more than one request, you can switch between the views using the *View* dropdown. - -[role="screenshot"] -image::visualize/images/vega_tutorial_inspect_requests.png[] - -===== Vega debugging - -With the *Vega debug* view, you can inspect the *Data sets* and *Signal Values* runtime data. - -The runtime data is read from the -https://vega.github.io/vega/docs/api/debugging/#scope[runtime scope]. 
- -[role="screenshot"] -image::visualize/images/vega_tutorial_inspect_data_sets.png[] - -To debug more complex specs, access to the `view` variable. For more information, refer to -the <>. - -===== Asking for help with a Vega spec - -Because of the dynamic nature of the data in {es}, it is hard to help you with -Vega specs unless you can share a dataset. To do this, click *Inspect*, select the *Vega debug* view, -then select the *Spec* tab: - -[role="screenshot"] -image::visualize/images/vega_tutorial_getting_help.png[] - -To copy the response, click *Copy to clipboard*. Paste the copied data to -https://gist.github.com/[gist.github.com], possibly with a .json extension. Use the [raw] button, -and share that when asking for help. - -[[vega-browser-debugging-console]] -==== Browser debugging console - -experimental[] Use browser debugging tools (for example, F12 or Ctrl+Shift+J in Chrome) to -inspect the `VEGA_DEBUG` variable: - -* `view` — Access to the Vega View object. See https://vega.github.io/vega/docs/api/debugging/[Vega Debugging Guide] -on how to inspect data and signals at runtime. For Vega-Lite, -`VEGA_DEBUG.view.data('source_0')` gets the pre-transformed data, and `VEGA_DEBUG.view.data('data_0')` -gets the encoded data. For Vega, it uses the data name as defined in your Vega spec. - -* `vega_spec` — Vega JSON graph specification after some modifications by {kib}. In case -of Vega-Lite, this is the output of the Vega-Lite compiler. - -* `vegalite_spec` — If this is a Vega-Lite graph, JSON specification of the graph before -Vega-Lite compilation. - -[float] -[[vega-expression-functions]] -==== (Vega only) Expression functions which can update the time range and dashboard filters - -{kib} has extended the Vega expression language with these functions: - -```js -/** - * @param {object} query Elastic Query DSL snippet, as used in the query DSL editor - * @param {string} [index] as defined in Kibana, or default if missing - */ -kibanaAddFilter(query, index) - -/** - * @param {object} query Elastic Query DSL snippet, as used in the query DSL editor - * @param {string} [index] as defined in Kibana, or default if missing - */ -kibanaRemoveFilter(query, index) - -kibanaRemoveAllFilters() - -/** - * Update dashboard time filter to the new values - * @param {number|string|Date} start - * @param {number|string|Date} end - */ -kibanaSetTimeFilter(start, end) -``` - -[float] -[[vega-additional-configuration-options]] -==== Additional configuration options - -[source,yaml] ----- -{ - config: { - kibana: { - // Placement of the Vega-defined signal bindings. - // Can be `left`, `right`, `top`, or `bottom` (default). - controlsLocation: top - // Can be `vertical` or `horizontal` (default). - controlsDirection: vertical - // If true, hides most of Vega and Vega-Lite warnings - hideWarnings: true - // Vega renderer to use: `svg` or `canvas` (default) - renderer: canvas - } - } -} ----- - - -[[vega-notes]] -[[vega-useful-links]] -=== Resources and examples - -To learn more about Vega and Vega-Lite, refer to the resources and examples. - -==== Vega editor -The https://vega.github.io/editor/[Vega Editor] includes examples for Vega & Vega-Lite, but does not support any -{kib}-specific features like {es} requests and interactive base maps. 
- -==== Vega-Lite resources -* https://vega.github.io/vega-lite/tutorials/getting_started.html[Tutorials] -* https://vega.github.io/vega-lite/docs/[Docs] -* https://vega.github.io/vega-lite/examples/[Examples] - -==== Vega resources -* https://vega.github.io/vega/tutorials/[Tutorials] -* https://vega.github.io/vega/docs/[Docs] -* https://vega.github.io/vega/examples/[Examples] - -TIP: When you use the examples in {kib}, you may -need to modify the "data" section to use absolute URL. For example, -replace `"url": "data/world-110m.json"` with -`"url": "https://vega.github.io/editor/data/world-110m.json"`. diff --git a/examples/alerting_example/tsconfig.json b/examples/alerting_example/tsconfig.json index fbcec9de439bd..09c130aca4642 100644 --- a/examples/alerting_example/tsconfig.json +++ b/examples/alerting_example/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target" }, diff --git a/examples/bfetch_explorer/tsconfig.json b/examples/bfetch_explorer/tsconfig.json index d508076b33199..798a9c222c5ab 100644 --- a/examples/bfetch_explorer/tsconfig.json +++ b/examples/bfetch_explorer/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/dashboard_embeddable_examples/tsconfig.json b/examples/dashboard_embeddable_examples/tsconfig.json index d508076b33199..798a9c222c5ab 100644 --- a/examples/dashboard_embeddable_examples/tsconfig.json +++ b/examples/dashboard_embeddable_examples/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/developer_examples/tsconfig.json b/examples/developer_examples/tsconfig.json index d508076b33199..798a9c222c5ab 100644 --- a/examples/developer_examples/tsconfig.json +++ b/examples/developer_examples/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/embeddable_examples/public/book/book_embeddable.tsx b/examples/embeddable_examples/public/book/book_embeddable.tsx index b033fe86cd1c7..48b81c27e8b8d 100644 --- a/examples/embeddable_examples/public/book/book_embeddable.tsx +++ b/examples/embeddable_examples/public/book/book_embeddable.tsx @@ -71,11 +71,7 @@ export class BookEmbeddable constructor( initialInput: BookEmbeddableInput, - private attributeService: AttributeService< - BookSavedObjectAttributes, - BookByValueInput, - BookByReferenceInput - >, + private attributeService: AttributeService, { parent, }: { @@ -99,18 +95,21 @@ export class BookEmbeddable }); } - inputIsRefType = (input: BookEmbeddableInput): input is BookByReferenceInput => { + readonly inputIsRefType = (input: BookEmbeddableInput): input is BookByReferenceInput => { return this.attributeService.inputIsRefType(input); }; - getInputAsValueType = async (): Promise => { + readonly getInputAsValueType = async (): Promise => { const input = this.attributeService.getExplicitInputFromEmbeddable(this); return this.attributeService.getInputAsValueType(input); }; - getInputAsRefType = async (): Promise => { + readonly getInputAsRefType = async (): Promise => { const input = this.attributeService.getExplicitInputFromEmbeddable(this); - return this.attributeService.getInputAsRefType(input, { 
showSaveModal: true }); + return this.attributeService.getInputAsRefType(input, { + showSaveModal: true, + saveModalTitle: this.getTitle(), + }); }; public render(node: HTMLElement) { diff --git a/examples/embeddable_examples/public/book/book_embeddable_factory.tsx b/examples/embeddable_examples/public/book/book_embeddable_factory.tsx index 4c144c3843c47..292261ee16c59 100644 --- a/examples/embeddable_examples/public/book/book_embeddable_factory.tsx +++ b/examples/embeddable_examples/public/book/book_embeddable_factory.tsx @@ -31,8 +31,6 @@ import { BOOK_EMBEDDABLE, BookEmbeddableInput, BookEmbeddableOutput, - BookByValueInput, - BookByReferenceInput, } from './book_embeddable'; import { CreateEditBookComponent } from './create_edit_book_component'; import { OverlayStart } from '../../../../src/core/public'; @@ -66,11 +64,7 @@ export class BookEmbeddableFactoryDefinition getIconForSavedObject: () => 'pencil', }; - private attributeService?: AttributeService< - BookSavedObjectAttributes, - BookByValueInput, - BookByReferenceInput - >; + private attributeService?: AttributeService; constructor(private getStartServices: () => Promise) {} @@ -126,9 +120,7 @@ export class BookEmbeddableFactoryDefinition private async getAttributeService() { if (!this.attributeService) { this.attributeService = await (await this.getStartServices()).getAttributeService< - BookSavedObjectAttributes, - BookByValueInput, - BookByReferenceInput + BookSavedObjectAttributes >(this.type); } return this.attributeService!; diff --git a/examples/embeddable_examples/public/book/edit_book_action.tsx b/examples/embeddable_examples/public/book/edit_book_action.tsx index 5b14dc85b1fc7..3541ace1e5e7e 100644 --- a/examples/embeddable_examples/public/book/edit_book_action.tsx +++ b/examples/embeddable_examples/public/book/edit_book_action.tsx @@ -57,13 +57,13 @@ export const createEditBookAction = (getStartServices: () => Promise { const { openModal, getAttributeService } = await getStartServices(); - const attributeService = getAttributeService< - BookSavedObjectAttributes, - BookByValueInput, - BookByReferenceInput - >(BOOK_SAVED_OBJECT); + const attributeService = getAttributeService(BOOK_SAVED_OBJECT); const onSave = async (attributes: BookSavedObjectAttributes, useRefType: boolean) => { - const newInput = await attributeService.wrapAttributes(attributes, useRefType, embeddable); + const newInput = await attributeService.wrapAttributes( + attributes, + useRefType, + attributeService.getExplicitInputFromEmbeddable(embeddable) + ); if (!useRefType && (embeddable.getInput() as SavedObjectEmbeddableInput).savedObjectId) { // Set the saved object ID to null so that update input will remove the existing savedObjectId... 
(newInput as BookByValueInput & { savedObjectId: unknown }).savedObjectId = null; diff --git a/examples/embeddable_examples/tsconfig.json b/examples/embeddable_examples/tsconfig.json index 7fa03739119b4..caeed2c1a434f 100644 --- a/examples/embeddable_examples/tsconfig.json +++ b/examples/embeddable_examples/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/embeddable_explorer/tsconfig.json b/examples/embeddable_explorer/tsconfig.json index d508076b33199..798a9c222c5ab 100644 --- a/examples/embeddable_explorer/tsconfig.json +++ b/examples/embeddable_explorer/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/routing_example/tsconfig.json b/examples/routing_example/tsconfig.json index 9bbd9021b2e0a..761a5c4da65ba 100644 --- a/examples/routing_example/tsconfig.json +++ b/examples/routing_example/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/search_examples/tsconfig.json b/examples/search_examples/tsconfig.json index 8a3ced743d0fa..8bec69ca40ccc 100644 --- a/examples/search_examples/tsconfig.json +++ b/examples/search_examples/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/state_containers_examples/tsconfig.json b/examples/state_containers_examples/tsconfig.json index 3f43072c2aade..007322e2d9525 100644 --- a/examples/state_containers_examples/tsconfig.json +++ b/examples/state_containers_examples/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/ui_action_examples/tsconfig.json b/examples/ui_action_examples/tsconfig.json index d508076b33199..798a9c222c5ab 100644 --- a/examples/ui_action_examples/tsconfig.json +++ b/examples/ui_action_examples/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/ui_actions_explorer/tsconfig.json b/examples/ui_actions_explorer/tsconfig.json index 199fbe1fcfa26..119209114a7bb 100644 --- a/examples/ui_actions_explorer/tsconfig.json +++ b/examples/ui_actions_explorer/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/url_generators_examples/tsconfig.json b/examples/url_generators_examples/tsconfig.json index 091130487791b..327b4642a8e7f 100644 --- a/examples/url_generators_examples/tsconfig.json +++ b/examples/url_generators_examples/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/examples/url_generators_explorer/tsconfig.json b/examples/url_generators_explorer/tsconfig.json index 091130487791b..327b4642a8e7f 100644 --- a/examples/url_generators_explorer/tsconfig.json +++ b/examples/url_generators_explorer/tsconfig.json @@ -1,5 
+1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "skipLibCheck": true diff --git a/kibana.d.ts b/kibana.d.ts index d64752abd8b60..517bda374af9d 100644 --- a/kibana.d.ts +++ b/kibana.d.ts @@ -39,8 +39,6 @@ export namespace Legacy { export type KibanaConfig = LegacyKibanaServer.KibanaConfig; export type Request = LegacyKibanaServer.Request; export type ResponseToolkit = LegacyKibanaServer.ResponseToolkit; - export type SavedObjectsClient = LegacyKibanaServer.SavedObjectsClient; - export type SavedObjectsService = LegacyKibanaServer.SavedObjectsLegacyService; export type Server = LegacyKibanaServer.Server; export type InitPluginFunction = LegacyKibanaPluginSpec.InitPluginFunction; diff --git a/package.json b/package.json index b8cf2e1e27774..95a6de337f62a 100644 --- a/package.json +++ b/package.json @@ -49,7 +49,6 @@ "test:ftr:server": "node scripts/functional_tests_server", "test:ftr:runner": "node scripts/functional_test_runner", "test:coverage": "grunt test:coverage", - "typespec": "typings-tester --config x-pack/plugins/canvas/public/lib/aeroelastic/tsconfig.json x-pack/plugins/canvas/public/lib/aeroelastic/__fixtures__/typescript/typespec_tests.ts", "checkLicenses": "node scripts/check_licenses --dev", "build": "node scripts/build --all-platforms", "start": "node scripts/kibana --dev", @@ -66,7 +65,7 @@ "kbn:watch": "node scripts/kibana --dev --logging.json=false", "build:types": "tsc --p tsconfig.types.json", "docs:acceptApiChanges": "node --max-old-space-size=6144 scripts/check_published_api_changes.js --accept", - "kbn:bootstrap": "node scripts/register_git_hook", + "kbn:bootstrap": "node scripts/build_ts_refs && node scripts/register_git_hook", "spec_to_console": "node scripts/spec_to_console", "backport-skip-ci": "backport --prDescription \"[skip-ci]\"", "storybook": "node scripts/storybook", @@ -93,6 +92,7 @@ "**/istanbul-instrumenter-loader/schema-utils": "1.0.0", "**/image-diff/gm/debug": "^2.6.9", "**/load-grunt-config/lodash": "^4.17.20", + "**/node-jose/node-forge": "^0.10.0", "**/react-dom": "^16.12.0", "**/react": "^16.12.0", "**/react-test-renderer": "^16.12.0", @@ -117,7 +117,10 @@ "**/@types/*/**", "**/grunt-*", "**/grunt-*/**", - "x-pack/typescript" + "x-pack/typescript", + "@elastic/eui/rehype-react", + "@elastic/eui/remark-rehype", + "@elastic/eui/remark-rehype/**" ] }, "dependencies": { @@ -125,7 +128,7 @@ "@babel/register": "^7.10.5", "@elastic/datemath": "5.0.3", "@elastic/elasticsearch": "7.9.0-rc.2", - "@elastic/eui": "27.4.1", + "@elastic/eui": "28.2.0", "@elastic/good": "8.1.1-kibana2", "@elastic/numeral": "^2.5.0", "@elastic/request-crypto": "1.1.4", @@ -142,6 +145,7 @@ "@kbn/test-subj-selector": "0.2.1", "@kbn/ui-framework": "1.0.0", "@kbn/ui-shared-deps": "1.0.0", + "@types/yauzl": "^2.9.1", "JSONStream": "1.3.5", "abortcontroller-polyfill": "^1.4.0", "accept": "3.0.2", @@ -188,7 +192,7 @@ "moment-timezone": "^0.5.27", "mustache": "2.3.2", "node-fetch": "1.7.3", - "node-forge": "^0.9.1", + "node-forge": "^0.10.0", "opn": "^5.5.0", "oppsy": "^2.0.0", "p-map": "^4.0.0", @@ -227,7 +231,7 @@ "@babel/parser": "^7.11.2", "@babel/types": "^7.11.0", "@elastic/apm-rum": "^5.5.0", - "@elastic/charts": "19.8.1", + "@elastic/charts": "21.1.2", "@elastic/ems-client": "7.9.3", "@elastic/eslint-config-kibana": "0.15.0", "@elastic/eslint-plugin-eui": "0.0.2", @@ -302,7 +306,7 @@ "@types/moment-timezone": "^0.5.12", "@types/mustache": "^0.8.31", "@types/node": ">=10.17.17 <10.20.0", - 
"@types/node-forge": "^0.9.0", + "@types/node-forge": "^0.9.5", "@types/normalize-path": "^3.0.0", "@types/opn": "^5.1.0", "@types/pegjs": "^0.10.1", @@ -363,7 +367,6 @@ "dedent": "^0.7.0", "deepmerge": "^4.2.2", "delete-empty": "^2.0.0", - "elasticsearch-browser": "^16.7.0", "enzyme": "^3.11.0", "enzyme-adapter-react-16": "^1.15.2", "enzyme-adapter-utils": "^1.13.0", @@ -470,7 +473,6 @@ "topojson-client": "3.0.0", "tree-kill": "^1.2.2", "typescript": "4.0.2", - "typings-tester": "^0.3.2", "ui-select": "0.19.8", "vega": "^5.13.0", "vega-lite": "^4.13.1", diff --git a/packages/elastic-datemath/tsconfig.json b/packages/elastic-datemath/tsconfig.json index 3604f1004cf6c..cbfe1e8047433 100644 --- a/packages/elastic-datemath/tsconfig.json +++ b/packages/elastic-datemath/tsconfig.json @@ -1,6 +1,9 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/elastic-datemath" + }, "include": [ "index.d.ts" - ], + ] } diff --git a/packages/eslint-config-kibana/.eslintrc.js b/packages/elastic-eslint-config-kibana/.eslintrc.js similarity index 100% rename from packages/eslint-config-kibana/.eslintrc.js rename to packages/elastic-eslint-config-kibana/.eslintrc.js diff --git a/packages/eslint-config-kibana/.gitignore b/packages/elastic-eslint-config-kibana/.gitignore similarity index 100% rename from packages/eslint-config-kibana/.gitignore rename to packages/elastic-eslint-config-kibana/.gitignore diff --git a/packages/eslint-config-kibana/.npmignore b/packages/elastic-eslint-config-kibana/.npmignore similarity index 100% rename from packages/eslint-config-kibana/.npmignore rename to packages/elastic-eslint-config-kibana/.npmignore diff --git a/packages/elastic-eslint-config-kibana/README.md b/packages/elastic-eslint-config-kibana/README.md new file mode 100644 index 0000000000000..2049440cd8ff7 --- /dev/null +++ b/packages/elastic-eslint-config-kibana/README.md @@ -0,0 +1,31 @@ +# elastic-eslint-config-kibana + +The eslint config used by the kibana team + +## Usage + +To use this eslint config, just install the peer dependencies and reference it +in your `.eslintrc`: + +```javascript +{ + extends: [ + '@elastic/eslint-config-kibana' + ] +} +``` + +## Optional jest config + +If the project uses the [jest test runner](https://facebook.github.io/jest/), +the `@elastic/eslint-config-kibana/jest` config can be extended as well to use +`eslint-plugin-jest` and add settings specific to it: + +```javascript +{ + extends: [ + '@elastic/eslint-config-kibana', + '@elastic/eslint-config-kibana/jest' + ] +} +``` diff --git a/packages/eslint-config-kibana/javascript.js b/packages/elastic-eslint-config-kibana/javascript.js similarity index 100% rename from packages/eslint-config-kibana/javascript.js rename to packages/elastic-eslint-config-kibana/javascript.js diff --git a/packages/eslint-config-kibana/jest.js b/packages/elastic-eslint-config-kibana/jest.js similarity index 100% rename from packages/eslint-config-kibana/jest.js rename to packages/elastic-eslint-config-kibana/jest.js diff --git a/packages/elastic-eslint-config-kibana/package.json b/packages/elastic-eslint-config-kibana/package.json new file mode 100644 index 0000000000000..a4bb8d5449ee8 --- /dev/null +++ b/packages/elastic-eslint-config-kibana/package.json @@ -0,0 +1,34 @@ +{ + "name": "@elastic/eslint-config-kibana", + "version": "0.15.0", + "description": "The eslint config used by the kibana team", + "main": ".eslintrc.js", + "repository": { + 
"type": "git", + "url": "git+https://github.com/elastic/kibana.git" + }, + "keywords": [], + "author": "Spencer Alger ", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/elastic/kibana/tree/master/packages/elastic-eslint-config-kibana" + }, + "homepage": "https://github.com/elastic/kibana/tree/master/packages/elastic-eslint-config-kibana", + "peerDependencies": { + "@typescript-eslint/eslint-plugin": "^3.10.0", + "@typescript-eslint/parser": "^3.10.0", + "babel-eslint": "^10.0.3", + "eslint": "^6.8.0", + "eslint-plugin-babel": "^5.3.0", + "eslint-plugin-ban": "^1.4.0", + "eslint-plugin-jsx-a11y": "^6.2.3", + "eslint-plugin-eslint-comments": "^3.2.0", + "eslint-plugin-import": "^2.19.1", + "eslint-plugin-jest": "^23.10.0", + "eslint-plugin-mocha": "^6.2.2", + "eslint-plugin-no-unsanitized": "^3.0.2", + "eslint-plugin-prefer-object-spread": "^1.2.1", + "eslint-plugin-react": "^7.17.0", + "eslint-plugin-react-hooks": "^4.0.4" + } +} diff --git a/packages/eslint-config-kibana/react.js b/packages/elastic-eslint-config-kibana/react.js similarity index 100% rename from packages/eslint-config-kibana/react.js rename to packages/elastic-eslint-config-kibana/react.js diff --git a/packages/eslint-config-kibana/restricted_globals.js b/packages/elastic-eslint-config-kibana/restricted_globals.js similarity index 100% rename from packages/eslint-config-kibana/restricted_globals.js rename to packages/elastic-eslint-config-kibana/restricted_globals.js diff --git a/packages/eslint-config-kibana/typescript.js b/packages/elastic-eslint-config-kibana/typescript.js similarity index 98% rename from packages/eslint-config-kibana/typescript.js rename to packages/elastic-eslint-config-kibana/typescript.js index 18b11eb62beef..d3e80b7448151 100644 --- a/packages/eslint-config-kibana/typescript.js +++ b/packages/elastic-eslint-config-kibana/typescript.js @@ -223,7 +223,8 @@ module.exports = { 'no-undef-init': 'error', 'no-unsafe-finally': 'error', 'no-unsanitized/property': 'error', - 'no-unused-expressions': 'error', + 'no-unused-expressions': 'off', + '@typescript-eslint/no-unused-expressions': 'error', 'no-unused-labels': 'error', 'no-var': 'error', 'object-shorthand': 'error', diff --git a/packages/elastic-safer-lodash-set/package.json b/packages/elastic-safer-lodash-set/package.json index f0f425661f605..7602f2fa5924f 100644 --- a/packages/elastic-safer-lodash-set/package.json +++ b/packages/elastic-safer-lodash-set/package.json @@ -16,7 +16,7 @@ "scripts": { "lint": "dependency-check --no-dev package.json set.js setWith.js fp/*.js", "test": "npm run lint && tape test/*.js && npm run test:types", - "test:types": "./scripts/tsd.sh", + "test:types": "tsc --noEmit", "update": "./scripts/update.sh", "save_state": "./scripts/save_state.sh" }, @@ -42,8 +42,5 @@ "ignore": [ "/lodash/" ] - }, - "tsd": { - "directory": "test" } } diff --git a/packages/elastic-safer-lodash-set/scripts/tsd.sh b/packages/elastic-safer-lodash-set/scripts/tsd.sh deleted file mode 100755 index 4572367df415d..0000000000000 --- a/packages/elastic-safer-lodash-set/scripts/tsd.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash - -# Elasticsearch B.V licenses this file to you under the MIT License. -# See `packages/elastic-safer-lodash-set/LICENSE` for more information. - -# tsd will get confused if it finds a tsconfig.json file in the project -# directory and start to scan the entirety of Kibana. We don't want that. -mv tsconfig.json tsconfig.tmp - -clean_up () { - exit_code=$? 
- mv tsconfig.tmp tsconfig.json - exit $exit_code -} -trap clean_up EXIT - -./node_modules/.bin/tsd diff --git a/packages/elastic-safer-lodash-set/test/fp.test-d.ts b/packages/elastic-safer-lodash-set/test/fp.ts similarity index 100% rename from packages/elastic-safer-lodash-set/test/fp.test-d.ts rename to packages/elastic-safer-lodash-set/test/fp.ts diff --git a/packages/elastic-safer-lodash-set/test/fp_assoc.test-d.ts b/packages/elastic-safer-lodash-set/test/fp_assoc.ts similarity index 100% rename from packages/elastic-safer-lodash-set/test/fp_assoc.test-d.ts rename to packages/elastic-safer-lodash-set/test/fp_assoc.ts diff --git a/packages/elastic-safer-lodash-set/test/fp_assocPath.test-d.ts b/packages/elastic-safer-lodash-set/test/fp_assocPath.ts similarity index 100% rename from packages/elastic-safer-lodash-set/test/fp_assocPath.test-d.ts rename to packages/elastic-safer-lodash-set/test/fp_assocPath.ts diff --git a/packages/elastic-safer-lodash-set/test/fp_set.test-d.ts b/packages/elastic-safer-lodash-set/test/fp_set.ts similarity index 100% rename from packages/elastic-safer-lodash-set/test/fp_set.test-d.ts rename to packages/elastic-safer-lodash-set/test/fp_set.ts diff --git a/packages/elastic-safer-lodash-set/test/fp_setWith.test-d.ts b/packages/elastic-safer-lodash-set/test/fp_setWith.ts similarity index 100% rename from packages/elastic-safer-lodash-set/test/fp_setWith.test-d.ts rename to packages/elastic-safer-lodash-set/test/fp_setWith.ts diff --git a/packages/elastic-safer-lodash-set/test/index.test-d.ts b/packages/elastic-safer-lodash-set/test/index.test-d.ts deleted file mode 100644 index ab29d7de5a03f..0000000000000 --- a/packages/elastic-safer-lodash-set/test/index.test-d.ts +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Elasticsearch B.V licenses this file to you under the MIT License. - * See `packages/elastic-safer-lodash-set/LICENSE` for more information. - */ - -import { expectType } from 'tsd'; -import { set, setWith } from '../'; - -const someObj: object = {}; -const anyValue: any = 'any value'; - -expectType(set(someObj, 'a.b.c', anyValue)); -expectType( - setWith(someObj, 'a.b.c', anyValue, (value, key, obj) => { - expectType(value); - expectType(key); - expectType(obj); - }) -); - -expectType(set(someObj, ['a.b.c'], anyValue)); -expectType( - setWith(someObj, ['a.b.c'], anyValue, (value, key, obj) => { - expectType(value); - expectType(key); - expectType(obj); - }) -); - -expectType(set(someObj, ['a.b.c', 2, Symbol('hep')], anyValue)); -expectType( - setWith(someObj, ['a.b.c', 2, Symbol('hep')], anyValue, (value, key, obj) => { - expectType(value); - expectType(key); - expectType(obj); - }) -); diff --git a/packages/elastic-safer-lodash-set/test/index.ts b/packages/elastic-safer-lodash-set/test/index.ts new file mode 100644 index 0000000000000..2090c1adcfce1 --- /dev/null +++ b/packages/elastic-safer-lodash-set/test/index.ts @@ -0,0 +1,37 @@ +/* + * Elasticsearch B.V licenses this file to you under the MIT License. + * See `packages/elastic-safer-lodash-set/LICENSE` for more information. 
+ */ + +import { expectType } from 'tsd'; +import { set, setWith } from '..'; + +const someObj: object = {}; +const anyValue: any = 'any value'; + +expectType(set(someObj, 'a.b.c', anyValue)); +expectType( + setWith(someObj, 'a.b.c', anyValue, (value, key, obj) => { + expectType(value); + expectType(key); + expectType(obj); + }) +); + +expectType(set(someObj, ['a.b.c'], anyValue)); +expectType( + setWith(someObj, ['a.b.c'], anyValue, (value, key, obj) => { + expectType(value); + expectType(key); + expectType(obj); + }) +); + +expectType(set(someObj, ['a.b.c', 2, Symbol('hep')], anyValue)); +expectType( + setWith(someObj, ['a.b.c', 2, Symbol('hep')], anyValue, (value, key, obj) => { + expectType(value); + expectType(key); + expectType(obj); + }) +); diff --git a/packages/elastic-safer-lodash-set/test/set.test-d.ts b/packages/elastic-safer-lodash-set/test/set.ts similarity index 100% rename from packages/elastic-safer-lodash-set/test/set.test-d.ts rename to packages/elastic-safer-lodash-set/test/set.ts diff --git a/packages/elastic-safer-lodash-set/test/setWith.test-d.ts b/packages/elastic-safer-lodash-set/test/setWith.ts similarity index 100% rename from packages/elastic-safer-lodash-set/test/setWith.test-d.ts rename to packages/elastic-safer-lodash-set/test/setWith.ts diff --git a/packages/elastic-safer-lodash-set/tsconfig.json b/packages/elastic-safer-lodash-set/tsconfig.json index bc1d1a3a7e413..6517e5c60ee01 100644 --- a/packages/elastic-safer-lodash-set/tsconfig.json +++ b/packages/elastic-safer-lodash-set/tsconfig.json @@ -1,9 +1,9 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/elastic-safer-lodash-set" + }, "include": [ - "**/*" + "**/*", ], - "exclude": [ - "**/*.test-d.ts" - ] } diff --git a/packages/eslint-config-kibana/README.md b/packages/eslint-config-kibana/README.md deleted file mode 100644 index 68c1639b834a5..0000000000000 --- a/packages/eslint-config-kibana/README.md +++ /dev/null @@ -1,31 +0,0 @@ -# eslint-config-kibana - -The eslint config used by the kibana team - -## Usage - -To use this eslint config, just install the peer dependencies and reference it -in your `.eslintrc`: - -```javascript -{ - extends: [ - '@elastic/eslint-config-kibana' - ] -} -``` - -## Optional jest config - -If the project uses the [jest test runner](https://facebook.github.io/jest/), -the `@elastic/eslint-config-kibana/jest` config can be extended as well to use -`eslint-plugin-jest` and add settings specific to it: - -```javascript -{ - extends: [ - '@elastic/eslint-config-kibana', - '@elastic/eslint-config-kibana/jest' - ] -} -``` diff --git a/packages/eslint-config-kibana/package.json b/packages/eslint-config-kibana/package.json deleted file mode 100644 index 4ec3bcdfd7c05..0000000000000 --- a/packages/eslint-config-kibana/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "@elastic/eslint-config-kibana", - "version": "0.15.0", - "description": "The eslint config used by the kibana team", - "main": ".eslintrc.js", - "repository": { - "type": "git", - "url": "git+https://github.com/elastic/eslint-config-kibana.git" - }, - "keywords": [], - "author": "Spencer Alger ", - "license": "Apache-2.0", - "bugs": { - "url": "https://github.com/elastic/kibana/tree/master/packages/eslint-config-kibana" - }, - "homepage": "https://github.com/elastic/kibana/tree/master/packages/eslint-config-kibana", - "peerDependencies": { - "@typescript-eslint/eslint-plugin": 
"^3.10.0", - "@typescript-eslint/parser": "^3.10.0", - "babel-eslint": "^10.0.3", - "eslint": "^6.8.0", - "eslint-plugin-babel": "^5.3.0", - "eslint-plugin-ban": "^1.4.0", - "eslint-plugin-jsx-a11y": "^6.2.3", - "eslint-plugin-eslint-comments": "^3.2.0", - "eslint-plugin-import": "^2.19.1", - "eslint-plugin-jest": "^23.10.0", - "eslint-plugin-mocha": "^6.2.2", - "eslint-plugin-no-unsanitized": "^3.0.2", - "eslint-plugin-prefer-object-spread": "^1.2.1", - "eslint-plugin-react": "^7.17.0", - "eslint-plugin-react-hooks": "^4.0.4" - } -} diff --git a/packages/kbn-analytics/scripts/build.js b/packages/kbn-analytics/scripts/build.js index 448d1ca9332f2..0e00a144d0b92 100644 --- a/packages/kbn-analytics/scripts/build.js +++ b/packages/kbn-analytics/scripts/build.js @@ -71,7 +71,6 @@ run( proc.run(padRight(10, 'tsc'), { cmd: 'tsc', args: [ - '--emitDeclarationOnly', ...(flags.watch ? ['--watch', '--preserveWatchOutput', 'true'] : []), ...(flags['source-maps'] ? ['--declarationMap', 'true'] : []), ], diff --git a/packages/kbn-analytics/tsconfig.json b/packages/kbn-analytics/tsconfig.json index fdd9e8281fba8..861e0204a31a2 100644 --- a/packages/kbn-analytics/tsconfig.json +++ b/packages/kbn-analytics/tsconfig.json @@ -1,8 +1,9 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "declaration": true, - "declarationDir": "./target/types", + "emitDeclarationOnly": true, + "outDir": "./target/types", "stripInternal": true, "declarationMap": true, "types": [ @@ -11,7 +12,7 @@ ] }, "include": [ - "./src/**/*.ts" + "src/**/*" ], "exclude": [ "target" diff --git a/packages/kbn-config-schema/tsconfig.json b/packages/kbn-config-schema/tsconfig.json index f6c61268da17c..6a268f2e7c016 100644 --- a/packages/kbn-config-schema/tsconfig.json +++ b/packages/kbn-config-schema/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "declaration": true, "declarationDir": "./target/types", diff --git a/packages/kbn-dev-utils/tsconfig.json b/packages/kbn-dev-utils/tsconfig.json index 0ec058eeb8a28..1c6c671d0b768 100644 --- a/packages/kbn-dev-utils/tsconfig.json +++ b/packages/kbn-dev-utils/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "target", "target": "ES2019", diff --git a/packages/kbn-es-archiver/src/actions/save.ts b/packages/kbn-es-archiver/src/actions/save.ts index 2f87cabadee6c..84a0ce09936d0 100644 --- a/packages/kbn-es-archiver/src/actions/save.ts +++ b/packages/kbn-es-archiver/src/actions/save.ts @@ -39,6 +39,7 @@ export async function saveAction({ dataDir, log, raw, + query, }: { name: string; indices: string | string[]; @@ -46,6 +47,7 @@ export async function saveAction({ dataDir: string; log: ToolingLog; raw: boolean; + query?: Record; }) { const outputDir = resolve(dataDir, name); const stats = createStats(name, log); @@ -69,7 +71,7 @@ export async function saveAction({ // export all documents from matching indexes into data.json.gz createPromiseFromStreams([ createListStream(indices), - createGenerateDocRecordsStream(client, stats, progress), + createGenerateDocRecordsStream({ client, stats, progress, query }), ...createFormatArchiveStreams({ gzip: !raw }), createWriteStream(resolve(outputDir, `data.json${raw ? 
'' : '.gz'}`)), ] as [Readable, ...Writable[]]), diff --git a/packages/kbn-es-archiver/src/cli.ts b/packages/kbn-es-archiver/src/cli.ts index 1745bd862b434..41abe83c148cd 100644 --- a/packages/kbn-es-archiver/src/cli.ts +++ b/packages/kbn-es-archiver/src/cli.ts @@ -122,8 +122,10 @@ export function runCli() { `, flags: { boolean: ['raw'], + string: ['query'], help: ` --raw don't gzip the archives + --query query object to limit the documents being archived, needs to be properly escaped JSON `, }, async run({ flags, esArchiver }) { @@ -140,7 +142,17 @@ export function runCli() { throw createFlagError('--raw does not take a value'); } - await esArchiver.save(name, indices, { raw }); + const query = flags.query; + let parsedQuery; + if (typeof query === 'string') { + try { + parsedQuery = JSON.parse(query); + } catch (err) { + throw createFlagError('--query should be valid JSON'); + } + } + + await esArchiver.save(name, indices, { raw, query: parsedQuery }); }, }) .command({ diff --git a/packages/kbn-es-archiver/src/es_archiver.ts b/packages/kbn-es-archiver/src/es_archiver.ts index e335652195b86..d61e7d2a422e8 100644 --- a/packages/kbn-es-archiver/src/es_archiver.ts +++ b/packages/kbn-es-archiver/src/es_archiver.ts @@ -62,7 +62,11 @@ export class EsArchiver { * @property {Boolean} options.raw - should the archive be raw (unzipped) or not * @return Promise */ - async save(name: string, indices: string | string[], { raw = false }: { raw?: boolean } = {}) { + async save( + name: string, + indices: string | string[], + { raw = false, query }: { raw?: boolean; query?: Record } = {} + ) { return await saveAction({ name, indices, @@ -70,6 +74,7 @@ export class EsArchiver { client: this.client, dataDir: this.dataDir, log: this.log, + query, }); } diff --git a/packages/kbn-es-archiver/src/lib/docs/__tests__/generate_doc_records_stream.ts b/packages/kbn-es-archiver/src/lib/docs/__tests__/generate_doc_records_stream.ts index 2214f7ae9f2ea..3c5fc742a6e10 100644 --- a/packages/kbn-es-archiver/src/lib/docs/__tests__/generate_doc_records_stream.ts +++ b/packages/kbn-es-archiver/src/lib/docs/__tests__/generate_doc_records_stream.ts @@ -47,7 +47,7 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => { const progress = new Progress(); await createPromiseFromStreams([ createListStream(['logstash-*']), - createGenerateDocRecordsStream(client, stats, progress), + createGenerateDocRecordsStream({ client, stats, progress }), ]); expect(progress.getTotal()).to.be(0); @@ -74,7 +74,7 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => { const progress = new Progress(); await createPromiseFromStreams([ createListStream(['logstash-*']), - createGenerateDocRecordsStream(client, stats, progress), + createGenerateDocRecordsStream({ client, stats, progress }), ]); expect(progress.getTotal()).to.be(0); @@ -115,7 +115,7 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => { const progress = new Progress(); const docRecords = await createPromiseFromStreams([ createListStream(['index1', 'index2']), - createGenerateDocRecordsStream(client, stats, progress), + createGenerateDocRecordsStream({ client, stats, progress }), createConcatStream([]), ]); diff --git a/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.ts b/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.ts index e255a0abc36c5..87c166fe275cc 100644 --- a/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.ts +++ b/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.ts @@ 
-25,7 +25,17 @@ import { Progress } from '../progress'; const SCROLL_SIZE = 1000; const SCROLL_TIMEOUT = '1m'; -export function createGenerateDocRecordsStream(client: Client, stats: Stats, progress: Progress) { +export function createGenerateDocRecordsStream({ + client, + stats, + progress, + query, +}: { + client: Client; + stats: Stats; + progress: Progress; + query?: Record; +}) { return new Transform({ writableObjectMode: true, readableObjectMode: true, @@ -41,6 +51,9 @@ export function createGenerateDocRecordsStream(client: Client, stats: Stats, pro scroll: SCROLL_TIMEOUT, size: SCROLL_SIZE, _source: true, + body: { + query, + }, rest_total_hits_as_int: true, // not declared on SearchParams type } as SearchParams); remainingHits = resp.hits.total; diff --git a/packages/kbn-es-archiver/src/lib/indices/generate_index_records_stream.ts b/packages/kbn-es-archiver/src/lib/indices/generate_index_records_stream.ts index b4b98f8ae262c..07ee1420741c9 100644 --- a/packages/kbn-es-archiver/src/lib/indices/generate_index_records_stream.ts +++ b/packages/kbn-es-archiver/src/lib/indices/generate_index_records_stream.ts @@ -37,6 +37,10 @@ export function createGenerateIndexRecordsStream(client: Client, stats: Stats) { '-*.settings.index.uuid', '-*.settings.index.version', '-*.settings.index.provided_name', + '-*.settings.index.frozen', + '-*.settings.index.search.throttled', + '-*.settings.index.query', + '-*.settings.index.routing', ], })) as Record; diff --git a/packages/kbn-es-archiver/src/lib/streams.ts b/packages/kbn-es-archiver/src/lib/streams.ts deleted file mode 100644 index a90afbe0c4d25..0000000000000 --- a/packages/kbn-es-archiver/src/lib/streams.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export * from '../../../../src/legacy/utils/streams'; diff --git a/src/legacy/utils/streams/concat_stream.test.js b/packages/kbn-es-archiver/src/lib/streams/concat_stream.test.js similarity index 100% rename from src/legacy/utils/streams/concat_stream.test.js rename to packages/kbn-es-archiver/src/lib/streams/concat_stream.test.js diff --git a/packages/kbn-es-archiver/src/lib/streams/concat_stream.ts b/packages/kbn-es-archiver/src/lib/streams/concat_stream.ts new file mode 100644 index 0000000000000..03dd894067afc --- /dev/null +++ b/packages/kbn-es-archiver/src/lib/streams/concat_stream.ts @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
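For reference, the `--query` option added to the es_archiver `save` command above must be passed as escaped JSON on the command line; it is parsed by the CLI and forwarded into the scroll search body shown here. The sketch below only prepares such a value; the archive name, index pattern, and query body are invented for illustration.

```typescript
// Illustrative only: build a query object for `--query` and print the
// escaped JSON string the CLI expects, e.g.
//   node scripts/es_archiver save my_archive 'logstash-*' --query '<json>'
const query = { range: { '@timestamp': { gte: 'now-1d/d' } } };

// The CLI parses this string with JSON.parse and passes the resulting
// object through to esArchiver.save(), which places it in the search body.
console.log(JSON.stringify(query));
```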
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { createReduceStream } from './reduce_stream'; + +/** + * Creates a Transform stream that consumes all provided + * values and concatenates them using each values `concat` + * method. + * + * Concatenate strings: + * createListStream(['f', 'o', 'o']) + * .pipe(createConcatStream()) + * .on('data', console.log) + * // logs "foo" + * + * Concatenate values into an array: + * createListStream([1,2,3]) + * .pipe(createConcatStream([])) + * .on('data', console.log) + * // logs "[1,2,3]" + */ +export function createConcatStream(initial: any) { + return createReduceStream((acc, chunk) => acc.concat(chunk), initial); +} diff --git a/src/legacy/utils/streams/concat_stream_providers.test.js b/packages/kbn-es-archiver/src/lib/streams/concat_stream_providers.test.js similarity index 100% rename from src/legacy/utils/streams/concat_stream_providers.test.js rename to packages/kbn-es-archiver/src/lib/streams/concat_stream_providers.test.js diff --git a/packages/kbn-es-archiver/src/lib/streams/concat_stream_providers.ts b/packages/kbn-es-archiver/src/lib/streams/concat_stream_providers.ts new file mode 100644 index 0000000000000..4794d76cc7f84 --- /dev/null +++ b/packages/kbn-es-archiver/src/lib/streams/concat_stream_providers.ts @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { PassThrough, TransformOptions } from 'stream'; + +/** + * Write the data and errors from a list of stream providers + * to a single stream in order. Stream providers are only + * called right before they will be consumed, and only one + * provider will be active at a time. 
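A brief usage sketch of `concatStreamProviders` may help; the file names are invented and the relative import path is an assumption about how the module is consumed inside the package.

```typescript
import { createReadStream } from 'fs';

import { concatStreamProviders } from './concat_stream_providers';

// Read two archive parts as one ordered stream. The second file is only
// opened after the first source emits 'end', so at most one source is
// active at a time, and errors from either source surface on `combined`.
const combined = concatStreamProviders([
  () => createReadStream('data_part_1.ndjson'),
  () => createReadStream('data_part_2.ndjson'),
]);

combined.pipe(process.stdout);
```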
+ */ +export function concatStreamProviders( + sourceProviders: Array<() => NodeJS.ReadableStream>, + options: TransformOptions = {} +) { + const destination = new PassThrough(options); + const queue = sourceProviders.slice(); + + (function pipeNext() { + const provider = queue.shift(); + + if (!provider) { + return; + } + + const source = provider(); + const isLast = !queue.length; + + // if there are more sources to pipe, hook + // into the source completion + if (!isLast) { + source.once('end', pipeNext); + } + + source + // proxy errors from the source to the destination + .once('error', (error) => destination.emit('error', error)) + // pipe the source to the destination but only proxy the + // end event if this is the last source + .pipe(destination, { end: isLast }); + })(); + + return destination; +} diff --git a/src/legacy/utils/streams/filter_stream.test.ts b/packages/kbn-es-archiver/src/lib/streams/filter_stream.test.ts similarity index 100% rename from src/legacy/utils/streams/filter_stream.test.ts rename to packages/kbn-es-archiver/src/lib/streams/filter_stream.test.ts diff --git a/packages/kbn-es-archiver/src/lib/streams/filter_stream.ts b/packages/kbn-es-archiver/src/lib/streams/filter_stream.ts new file mode 100644 index 0000000000000..738b9d5793d06 --- /dev/null +++ b/packages/kbn-es-archiver/src/lib/streams/filter_stream.ts @@ -0,0 +1,33 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +export function createFilterStream(fn: (obj: T) => boolean) { + return new Transform({ + objectMode: true, + async transform(obj, _, done) { + const canPushDownStream = fn(obj); + if (canPushDownStream) { + this.push(obj); + } + done(); + }, + }); +} diff --git a/src/legacy/utils/streams/index.js b/packages/kbn-es-archiver/src/lib/streams/index.ts similarity index 100% rename from src/legacy/utils/streams/index.js rename to packages/kbn-es-archiver/src/lib/streams/index.ts diff --git a/src/legacy/utils/streams/intersperse_stream.test.js b/packages/kbn-es-archiver/src/lib/streams/intersperse_stream.test.js similarity index 100% rename from src/legacy/utils/streams/intersperse_stream.test.js rename to packages/kbn-es-archiver/src/lib/streams/intersperse_stream.test.js diff --git a/packages/kbn-es-archiver/src/lib/streams/intersperse_stream.ts b/packages/kbn-es-archiver/src/lib/streams/intersperse_stream.ts new file mode 100644 index 0000000000000..eb2e3d3087d4a --- /dev/null +++ b/packages/kbn-es-archiver/src/lib/streams/intersperse_stream.ts @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +/** + * Create a Transform stream that receives values in object mode, + * and intersperses a chunk between each object received. + * + * This is useful for writing lists: + * + * createListStream(['foo', 'bar']) + * .pipe(createIntersperseStream('\n')) + * .pipe(process.stdout) // outputs "foo\nbar" + * + * Combine with a concat stream to get "join" like functionality: + * + * await createPromiseFromStreams([ + * createListStream(['foo', 'bar']), + * createIntersperseStream(' '), + * createConcatStream() + * ]) // produces a single value "foo bar" + */ +export function createIntersperseStream(intersperseChunk: any) { + let first = true; + + return new Transform({ + writableObjectMode: true, + readableObjectMode: true, + transform(chunk, _, callback) { + try { + if (first) { + first = false; + } else { + this.push(intersperseChunk); + } + + this.push(chunk); + callback(undefined); + } catch (err) { + callback(err); + } + }, + }); +} diff --git a/src/legacy/utils/streams/list_stream.test.js b/packages/kbn-es-archiver/src/lib/streams/list_stream.test.js similarity index 100% rename from src/legacy/utils/streams/list_stream.test.js rename to packages/kbn-es-archiver/src/lib/streams/list_stream.test.js diff --git a/packages/kbn-es-archiver/src/lib/streams/list_stream.ts b/packages/kbn-es-archiver/src/lib/streams/list_stream.ts new file mode 100644 index 0000000000000..c061b969b3c09 --- /dev/null +++ b/packages/kbn-es-archiver/src/lib/streams/list_stream.ts @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { Readable } from 'stream'; + +/** + * Create a Readable stream that provides the items + * from a list as objects to subscribers + */ +export function createListStream(items: any | any[] = []) { + const queue: any[] = [].concat(items); + + return new Readable({ + objectMode: true, + read(size) { + queue.splice(0, size).forEach((item) => { + this.push(item); + }); + + if (!queue.length) { + this.push(null); + } + }, + }); +} diff --git a/src/legacy/utils/streams/map_stream.test.js b/packages/kbn-es-archiver/src/lib/streams/map_stream.test.js similarity index 100% rename from src/legacy/utils/streams/map_stream.test.js rename to packages/kbn-es-archiver/src/lib/streams/map_stream.test.js diff --git a/packages/kbn-es-archiver/src/lib/streams/map_stream.ts b/packages/kbn-es-archiver/src/lib/streams/map_stream.ts new file mode 100644 index 0000000000000..e88c512a38653 --- /dev/null +++ b/packages/kbn-es-archiver/src/lib/streams/map_stream.ts @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +export function createMapStream(fn: (chunk: any, i: number) => T | Promise) { + let i = 0; + + return new Transform({ + objectMode: true, + async transform(value, _, done) { + try { + this.push(await fn(value, i++)); + done(); + } catch (err) { + done(err); + } + }, + }); +} diff --git a/src/legacy/utils/streams/promise_from_streams.test.js b/packages/kbn-es-archiver/src/lib/streams/promise_from_streams.test.js similarity index 100% rename from src/legacy/utils/streams/promise_from_streams.test.js rename to packages/kbn-es-archiver/src/lib/streams/promise_from_streams.test.js diff --git a/packages/kbn-es-archiver/src/lib/streams/promise_from_streams.ts b/packages/kbn-es-archiver/src/lib/streams/promise_from_streams.ts new file mode 100644 index 0000000000000..fefb18be14780 --- /dev/null +++ b/packages/kbn-es-archiver/src/lib/streams/promise_from_streams.ts @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
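As a rough illustration of how `createListStream` and `createMapStream` compose in an object-mode pipeline (import paths assumed, values arbitrary):

```typescript
import { createListStream } from './list_stream';
import { createMapStream } from './map_stream';

// Push three numbers through the pipeline, doubling each one as it passes.
createListStream([1, 2, 3])
  .pipe(createMapStream((n: number) => n * 2))
  .on('data', (value) => console.log(value)); // logs 2, 4, 6
```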
+ */ + +/** + * Take an array of streams, pipe the output + * from each one into the next, listening for + * errors from any of the streams, and then resolve + * the promise once the final stream has finished + * writing/reading. + * + * If the last stream is readable, it's final value + * will be provided as the promise value. + * + * Errors emitted from any stream will cause + * the promise to be rejected with that error. + */ + +import { pipeline, Writable } from 'stream'; +import { promisify } from 'util'; + +const asyncPipeline = promisify(pipeline); + +export async function createPromiseFromStreams(streams: any): Promise { + let finalChunk: any; + const last = streams[streams.length - 1]; + if (typeof last.read !== 'function' && streams.length === 1) { + // For a nicer error than what stream.pipeline throws + throw new Error('A minimum of 2 streams is required when a non-readable stream is given'); + } + if (typeof last.read === 'function') { + // We are pushing a writable stream to capture the last chunk + streams.push( + new Writable({ + // Use object mode even when "last" stream isn't. This allows to + // capture the last chunk as-is. + objectMode: true, + write(chunk, _, done) { + finalChunk = chunk; + done(); + }, + }) + ); + } + + await asyncPipeline(...(streams as [any])); + + return finalChunk; +} diff --git a/src/legacy/utils/streams/reduce_stream.test.js b/packages/kbn-es-archiver/src/lib/streams/reduce_stream.test.js similarity index 100% rename from src/legacy/utils/streams/reduce_stream.test.js rename to packages/kbn-es-archiver/src/lib/streams/reduce_stream.test.js diff --git a/packages/kbn-es-archiver/src/lib/streams/reduce_stream.ts b/packages/kbn-es-archiver/src/lib/streams/reduce_stream.ts new file mode 100644 index 0000000000000..d9458e9a11c33 --- /dev/null +++ b/packages/kbn-es-archiver/src/lib/streams/reduce_stream.ts @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +/** + * Create a transform stream that consumes each chunk it receives + * and passes it to the reducer, which will return the new value + * for the stream. Once all chunks have been received the reduce + * stream provides the result of final call to the reducer to + * subscribers. + */ +export function createReduceStream( + reducer: (acc: any, chunk: any, env: string) => any, + initial: any +) { + let i = -1; + let value = initial; + + // if the reducer throws an error then the value is + // considered invalid and the stream will never provide + // it to subscribers. 
We will also stop calling the + // reducer for any new data that is provided to us + let failed = false; + + if (typeof reducer !== 'function') { + throw new TypeError('reducer must be a function'); + } + + return new Transform({ + readableObjectMode: true, + writableObjectMode: true, + async transform(chunk, enc, callback) { + try { + if (failed) { + return callback(); + } + + i += 1; + if (i === 0 && initial === undefined) { + value = chunk; + } else { + value = await reducer(value, chunk, enc); + } + + callback(); + } catch (err) { + failed = true; + callback(err); + } + }, + + flush(callback) { + if (!failed) { + this.push(value); + } + + callback(); + }, + }); +} diff --git a/src/legacy/utils/streams/replace_stream.test.js b/packages/kbn-es-archiver/src/lib/streams/replace_stream.test.js similarity index 100% rename from src/legacy/utils/streams/replace_stream.test.js rename to packages/kbn-es-archiver/src/lib/streams/replace_stream.test.js diff --git a/packages/kbn-es-archiver/src/lib/streams/replace_stream.ts b/packages/kbn-es-archiver/src/lib/streams/replace_stream.ts new file mode 100644 index 0000000000000..fe2ba1fcdf31c --- /dev/null +++ b/packages/kbn-es-archiver/src/lib/streams/replace_stream.ts @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +export function createReplaceStream(toReplace: string, replacement: string) { + if (typeof toReplace !== 'string') { + throw new TypeError('toReplace must be a string'); + } + + let buffer = Buffer.alloc(0); + return new Transform({ + objectMode: false, + async transform(value, _, done) { + try { + buffer = Buffer.concat([buffer, value], buffer.length + value.length); + + while (true) { + // try to find the next instance of `toReplace` in buffer + const index = buffer.indexOf(toReplace); + + // if there is no next instance, break + if (index === -1) { + break; + } + + // flush everything to the left of the next instance + // of `toReplace` + this.push(buffer.slice(0, index)); + + // then flush an instance of `replacement` + this.push(replacement); + + // and finally update the buffer to include everything + // to the right of `toReplace`, dropping to replace from the buffer + buffer = buffer.slice(index + toReplace.length); + } + + // until now we have only flushed data that is to the left + // of a discovered instance of `toReplace`. If `toReplace` is + // never found this would lead to us buffering the entire stream. 
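To make the reduce semantics concrete, here is a small assumed usage of `createReduceStream` together with `createListStream` and `createPromiseFromStreams` (import paths are illustrative):

```typescript
import { createListStream } from './list_stream';
import { createPromiseFromStreams } from './promise_from_streams';
import { createReduceStream } from './reduce_stream';

async function sumExample() {
  // The reduce stream pushes a single final value once its input ends,
  // and createPromiseFromStreams resolves with that value.
  const sum = await createPromiseFromStreams([
    createListStream([1, 2, 3, 4]),
    createReduceStream((acc: number, chunk: number) => acc + chunk, 0),
  ]);

  console.log(sum); // 10
}

sumExample().catch((err) => console.error(err));
```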
+ // + // Instead, we only keep enough buffer to complete a potentially + // partial instance of `toReplace` + if (buffer.length > toReplace.length) { + // the entire buffer except the last `toReplace.length` bytes + // so that if all but one byte from `toReplace` is in the buffer, + // and the next chunk delivers the necessary byte, the buffer will then + // contain a complete `toReplace` token. + this.push(buffer.slice(0, buffer.length - toReplace.length)); + buffer = buffer.slice(-toReplace.length); + } + + done(); + } catch (err) { + done(err); + } + }, + + flush(callback) { + if (buffer.length) { + this.push(buffer); + } + + callback(); + }, + }); +} diff --git a/src/legacy/utils/streams/split_stream.test.js b/packages/kbn-es-archiver/src/lib/streams/split_stream.test.js similarity index 100% rename from src/legacy/utils/streams/split_stream.test.js rename to packages/kbn-es-archiver/src/lib/streams/split_stream.test.js diff --git a/packages/kbn-es-archiver/src/lib/streams/split_stream.ts b/packages/kbn-es-archiver/src/lib/streams/split_stream.ts new file mode 100644 index 0000000000000..1c9b59449bd92 --- /dev/null +++ b/packages/kbn-es-archiver/src/lib/streams/split_stream.ts @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +/** + * Creates a Transform stream that consumes a stream of Buffers + * and produces a stream of strings (in object mode) by splitting + * the received bytes using the splitChunk. 
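A hedged sketch of `createReplaceStream` in use; the file names and the replaced token are invented, and the import path is an assumption.

```typescript
import { createReadStream, createWriteStream } from 'fs';

import { createReplaceStream } from './replace_stream';

// Rewrite every occurrence of one index name with another while copying a
// file. Because the stream keeps a small carry-over buffer, replacements
// still work when the token is split across chunk boundaries.
createReadStream('archive.ndjson')
  .pipe(createReplaceStream('logstash-2020', 'filebeat-2020'))
  .pipe(createWriteStream('archive.rewritten.ndjson'));
```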
+ * + * Ways this is behaves like String#split: + * - instances of splitChunk are removed from the input + * - splitChunk can be on any size + * - if there are no bytes found after the last splitChunk + * a final empty chunk is emitted + * + * Ways this deviates from String#split: + * - splitChunk cannot be a regexp + * - an empty string or Buffer will not produce a stream of individual + * bytes like `string.split('')` would + */ +export function createSplitStream(splitChunk: string) { + let unsplitBuffer = Buffer.alloc(0); + + return new Transform({ + writableObjectMode: false, + readableObjectMode: true, + transform(chunk, _, callback) { + try { + let i; + let toSplit = Buffer.concat([unsplitBuffer, chunk]); + while ((i = toSplit.indexOf(splitChunk)) !== -1) { + const slice = toSplit.slice(0, i); + toSplit = toSplit.slice(i + splitChunk.length); + this.push(slice.toString('utf8')); + } + + unsplitBuffer = toSplit; + callback(undefined); + } catch (err) { + callback(err); + } + }, + + flush(callback) { + try { + this.push(unsplitBuffer.toString('utf8')); + + callback(undefined); + } catch (err) { + callback(err); + } + }, + }); +} diff --git a/packages/kbn-es-archiver/tsconfig.json b/packages/kbn-es-archiver/tsconfig.json index 6ffa64d91fba0..02209a29e5817 100644 --- a/packages/kbn-es-archiver/tsconfig.json +++ b/packages/kbn-es-archiver/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "declaration": true, diff --git a/packages/kbn-es/package.json b/packages/kbn-es/package.json index c3670f648d309..dabf11fdd0b66 100644 --- a/packages/kbn-es/package.json +++ b/packages/kbn-es/package.json @@ -16,7 +16,7 @@ "glob": "^7.1.2", "node-fetch": "^2.6.0", "simple-git": "^1.91.0", - "tar-fs": "^1.16.3", + "tar-fs": "^2.1.0", "tree-kill": "^1.2.2", "yauzl": "^2.10.0" } diff --git a/packages/kbn-es/tsconfig.json b/packages/kbn-es/tsconfig.json index 6bb61453c99e7..9487a28232684 100644 --- a/packages/kbn-es/tsconfig.json +++ b/packages/kbn-es/tsconfig.json @@ -1,6 +1,9 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-es" + }, "include": [ - "src/**/*.ts" + "src/**/*" ] } diff --git a/packages/kbn-eslint-import-resolver-kibana/lib/get_webpack_config.js b/packages/kbn-eslint-import-resolver-kibana/lib/get_webpack_config.js index d4e234e3a6a2e..60a03ae8a104e 100755 --- a/packages/kbn-eslint-import-resolver-kibana/lib/get_webpack_config.js +++ b/packages/kbn-eslint-import-resolver-kibana/lib/get_webpack_config.js @@ -27,11 +27,7 @@ exports.getWebpackConfig = function (kibanaPath) { mainFields: ['browser', 'main'], modules: ['node_modules', resolve(kibanaPath, 'node_modules')], alias: { - // Kibana defaults https://github.com/elastic/kibana/blob/6998f074542e8c7b32955db159d15661aca253d7/src/legacy/ui/ui_bundler_env.js#L30-L36 - ui: resolve(kibanaPath, 'src/legacy/ui/public'), - // Dev defaults for test bundle https://github.com/elastic/kibana/blob/6998f074542e8c7b32955db159d15661aca253d7/src/core_plugins/tests_bundle/index.js#L73-L78 - ng_mock$: resolve(kibanaPath, 'src/test_utils/public/ng_mock'), fixtures: resolve(kibanaPath, 'src/fixtures'), test_utils: resolve(kibanaPath, 'src/test_utils/public'), }, diff --git a/packages/kbn-expect/tsconfig.json b/packages/kbn-expect/tsconfig.json index a09ae2d7ae641..ae7e9ff090cc2 100644 --- a/packages/kbn-expect/tsconfig.json +++ 
b/packages/kbn-expect/tsconfig.json @@ -1,5 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-expect" + }, "include": [ "expect.js.d.ts" ] diff --git a/packages/kbn-i18n/scripts/build.js b/packages/kbn-i18n/scripts/build.js index 62e1a35f00399..1d2b5031e37d7 100644 --- a/packages/kbn-i18n/scripts/build.js +++ b/packages/kbn-i18n/scripts/build.js @@ -71,7 +71,6 @@ run( proc.run(padRight(10, 'tsc'), { cmd: 'tsc', args: [ - '--emitDeclarationOnly', ...(flags.watch ? ['--watch', '--preserveWatchOutput', 'true'] : []), ...(flags['source-maps'] ? ['--declarationMap', 'true'] : []), ], diff --git a/packages/kbn-i18n/tsconfig.json b/packages/kbn-i18n/tsconfig.json index d3dae3078c1d7..c6380f1cde969 100644 --- a/packages/kbn-i18n/tsconfig.json +++ b/packages/kbn-i18n/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "include": [ "src/**/*.ts", "src/**/*.tsx", @@ -11,7 +11,8 @@ ], "compilerOptions": { "declaration": true, - "declarationDir": "./target/types", + "emitDeclarationOnly": true, + "outDir": "./target/types", "types": [ "jest", "node" diff --git a/packages/kbn-interpreter/tsconfig.json b/packages/kbn-interpreter/tsconfig.json index 63376a7ca1ae8..3b81bbb118a55 100644 --- a/packages/kbn-interpreter/tsconfig.json +++ b/packages/kbn-interpreter/tsconfig.json @@ -1,4 +1,7 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-interpreter" + }, "include": ["index.d.ts", "src/**/*.d.ts"] } diff --git a/packages/kbn-monaco/tsconfig.json b/packages/kbn-monaco/tsconfig.json index 95acfd32b24dd..6d3f433c6a6d1 100644 --- a/packages/kbn-monaco/tsconfig.json +++ b/packages/kbn-monaco/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "declaration": true, diff --git a/packages/kbn-optimizer/src/worker/webpack.config.ts b/packages/kbn-optimizer/src/worker/webpack.config.ts index 6b07384910abb..9f2c5654a8bd4 100644 --- a/packages/kbn-optimizer/src/worker/webpack.config.ts +++ b/packages/kbn-optimizer/src/worker/webpack.config.ts @@ -79,7 +79,6 @@ export function getWebpackConfig(bundle: Bundle, bundleRefs: BundleRefs, worker: // or which have require() statements that should be ignored because the file is // already bundled with all its necessary depedencies noParse: [ - /[\/\\]node_modules[\/\\]elasticsearch-browser[\/\\]/, /[\/\\]node_modules[\/\\]lodash[\/\\]index\.js$/, /[\/\\]node_modules[\/\\]vega[\/\\]build[\/\\]vega\.js$/, ], diff --git a/packages/kbn-optimizer/tsconfig.json b/packages/kbn-optimizer/tsconfig.json index e2994f4d02414..20b06b5658cbc 100644 --- a/packages/kbn-optimizer/tsconfig.json +++ b/packages/kbn-optimizer/tsconfig.json @@ -1,5 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-optimizer" + }, "include": [ "index.d.ts", "src/**/*" diff --git a/packages/kbn-plugin-generator/tsconfig.json b/packages/kbn-plugin-generator/tsconfig.json index fc88223dae4b2..c54ff041d7065 100644 --- a/packages/kbn-plugin-generator/tsconfig.json +++ b/packages/kbn-plugin-generator/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": 
{ "outDir": "target", "target": "ES2019", diff --git a/packages/kbn-plugin-helpers/package.json b/packages/kbn-plugin-helpers/package.json index 129c58a4b4174..f292387c12521 100644 --- a/packages/kbn-plugin-helpers/package.json +++ b/packages/kbn-plugin-helpers/package.json @@ -23,10 +23,10 @@ "vinyl-fs": "^3.0.3" }, "devDependencies": { - "@types/decompress": "^4.2.3", + "@types/extract-zip": "^1.6.2", "@types/gulp-zip": "^4.0.1", "@types/inquirer": "^6.5.0", - "decompress": "^4.2.1", + "extract-zip": "^2.0.1", "typescript": "4.0.2" } } diff --git a/packages/kbn-plugin-helpers/src/integration_tests/build.test.ts b/packages/kbn-plugin-helpers/src/integration_tests/build.test.ts index 62f83cd672f3d..be23d8dbde646 100644 --- a/packages/kbn-plugin-helpers/src/integration_tests/build.test.ts +++ b/packages/kbn-plugin-helpers/src/integration_tests/build.test.ts @@ -22,7 +22,7 @@ import Fs from 'fs'; import execa from 'execa'; import { createStripAnsiSerializer, REPO_ROOT, createReplaceSerializer } from '@kbn/dev-utils'; -import decompress from 'decompress'; +import extract from 'extract-zip'; import del from 'del'; import globby from 'globby'; import loadJsonFile from 'load-json-file'; @@ -81,7 +81,7 @@ it('builds a generated plugin into a viable archive', async () => { info compressing plugin into [fooTestPlugin-7.5.0.zip]" `); - await decompress(PLUGIN_ARCHIVE, TMP_DIR); + await extract(PLUGIN_ARCHIVE, { dir: TMP_DIR }); const files = await globby(['**/*'], { cwd: TMP_DIR }); files.sort((a, b) => a.localeCompare(b)); diff --git a/packages/kbn-plugin-helpers/tsconfig.json b/packages/kbn-plugin-helpers/tsconfig.json index e794b11b14afa..651bc79d6e707 100644 --- a/packages/kbn-plugin-helpers/tsconfig.json +++ b/packages/kbn-plugin-helpers/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "target", "declaration": true, diff --git a/packages/kbn-pm/README.md b/packages/kbn-pm/README.md index 05405e7b98374..c169b5c75e178 100644 --- a/packages/kbn-pm/README.md +++ b/packages/kbn-pm/README.md @@ -19,7 +19,7 @@ From a plugin perspective there are two different types of Kibana dependencies: runtime and static dependencies. Runtime dependencies are things that are instantiated at runtime and that are injected into the plugin, for example config and elasticsearch clients. Static dependencies are those dependencies -that we want to `import`. `eslint-config-kibana` is one example of this, and +that we want to `import`. `elastic-eslint-config-kibana` is one example of this, and it's actually needed because eslint requires it to be a separate package. 
But we also have dependencies like `datemath`, `flot`, `eui` and others that we control, but where we want to `import` them in plugins instead of injecting them diff --git a/packages/kbn-pm/dist/index.js b/packages/kbn-pm/dist/index.js index eb2d0d2581a34..9a3bb1c687032 100644 --- a/packages/kbn-pm/dist/index.js +++ b/packages/kbn-pm/dist/index.js @@ -27651,6 +27651,7 @@ var eos = function(stream, opts, callback) { var rs = stream._readableState; var readable = opts.readable || (opts.readable !== false && stream.readable); var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; var onlegacyfinish = function() { if (!stream.writable) onfinish(); @@ -27675,8 +27676,13 @@ var eos = function(stream, opts, callback) { }; var onclose = function() { - if (readable && !(rs && rs.ended)) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && ws.ended)) return callback.call(stream, new Error('premature close')); + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); }; var onrequest = function() { @@ -27701,6 +27707,7 @@ var eos = function(stream, opts, callback) { stream.on('close', onclose); return function() { + cancelled = true; stream.removeListener('complete', onfinish); stream.removeListener('abort', onclose); stream.removeListener('request', onrequest); diff --git a/packages/kbn-pm/tsconfig.json b/packages/kbn-pm/tsconfig.json index c13a9243c50aa..175c4701f2e5b 100644 --- a/packages/kbn-pm/tsconfig.json +++ b/packages/kbn-pm/tsconfig.json @@ -1,12 +1,13 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "include": [ "./index.d.ts", "./src/**/*.ts", - "./dist/*.d.ts", + "./dist/*.d.ts" ], "exclude": [], "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-pm", "types": [ "jest", "node" diff --git a/packages/kbn-release-notes/src/cli.ts b/packages/kbn-release-notes/src/cli.ts index 44b4a7a0282d2..7dcfa38078391 100644 --- a/packages/kbn-release-notes/src/cli.ts +++ b/packages/kbn-release-notes/src/cli.ts @@ -25,8 +25,7 @@ import { run, createFlagError, createFailError, REPO_ROOT } from '@kbn/dev-utils import { FORMATS, SomeFormat } from './formats'; import { - iterRelevantPullRequests, - getPr, + PrApi, Version, ClassifiedPr, streamFromIterable, @@ -48,6 +47,7 @@ export function runReleaseNotesCli() { if (!token || typeof token !== 'string') { throw createFlagError('--token must be defined'); } + const prApi = new PrApi(log, token); const version = Version.fromFlag(flags.version); if (!version) { @@ -80,7 +80,7 @@ export function runReleaseNotesCli() { } const summary = new IrrelevantPrSummary(log); - const pr = await getPr(token, number); + const pr = await prApi.getPr(number); log.success( inspect( { @@ -101,7 +101,7 @@ export function runReleaseNotesCli() { const summary = new IrrelevantPrSummary(log); const prsToReport: ClassifiedPr[] = []; - const prIterable = iterRelevantPullRequests(token, version, log); + const prIterable = prApi.iterRelevantPullRequests(version); for await (const pr of prIterable) { if (!isPrRelevant(pr, version, includeVersions, summary)) { continue; diff --git a/packages/kbn-release-notes/src/lib/classify_pr.ts 
b/packages/kbn-release-notes/src/lib/classify_pr.ts index c567935ab7e48..2dfe6916235ee 100644 --- a/packages/kbn-release-notes/src/lib/classify_pr.ts +++ b/packages/kbn-release-notes/src/lib/classify_pr.ts @@ -27,7 +27,7 @@ import { ASCIIDOC_SECTIONS, UNKNOWN_ASCIIDOC_SECTION, } from '../release_notes_config'; -import { PullRequest } from './pull_request'; +import { PullRequest } from './pr_api'; export interface ClassifiedPr extends PullRequest { area: Area; diff --git a/packages/kbn-release-notes/src/lib/index.ts b/packages/kbn-release-notes/src/lib/index.ts index 00d8f49cf763f..8d27a26d96d0a 100644 --- a/packages/kbn-release-notes/src/lib/index.ts +++ b/packages/kbn-release-notes/src/lib/index.ts @@ -17,7 +17,7 @@ * under the License. */ -export * from './pull_request'; +export * from './pr_api'; export * from './version'; export * from './is_pr_relevant'; export * from './streams'; diff --git a/packages/kbn-release-notes/src/lib/irrelevant_pr_summary.ts b/packages/kbn-release-notes/src/lib/irrelevant_pr_summary.ts index 1a458a04c7740..ba82ab8780465 100644 --- a/packages/kbn-release-notes/src/lib/irrelevant_pr_summary.ts +++ b/packages/kbn-release-notes/src/lib/irrelevant_pr_summary.ts @@ -19,7 +19,7 @@ import { ToolingLog } from '@kbn/dev-utils'; -import { PullRequest } from './pull_request'; +import { PullRequest } from './pr_api'; import { Version } from './version'; export class IrrelevantPrSummary { diff --git a/packages/kbn-release-notes/src/lib/is_pr_relevant.ts b/packages/kbn-release-notes/src/lib/is_pr_relevant.ts index af2ef9440dede..452a14e919ed4 100644 --- a/packages/kbn-release-notes/src/lib/is_pr_relevant.ts +++ b/packages/kbn-release-notes/src/lib/is_pr_relevant.ts @@ -18,7 +18,7 @@ */ import { Version } from './version'; -import { PullRequest } from './pull_request'; +import { PullRequest } from './pr_api'; import { IGNORE_LABELS } from '../release_notes_config'; import { IrrelevantPrSummary } from './irrelevant_pr_summary'; diff --git a/packages/kbn-release-notes/src/lib/pr_api.ts b/packages/kbn-release-notes/src/lib/pr_api.ts new file mode 100644 index 0000000000000..1f26aa7ad86c3 --- /dev/null +++ b/packages/kbn-release-notes/src/lib/pr_api.ts @@ -0,0 +1,231 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { inspect } from 'util'; + +import Axios from 'axios'; +import gql from 'graphql-tag'; +import * as GraphqlPrinter from 'graphql/language/printer'; +import { DocumentNode } from 'graphql/language/ast'; +import makeTerminalLink from 'terminal-link'; +import { ToolingLog, isAxiosResponseError } from '@kbn/dev-utils'; + +import { Version } from './version'; +import { getFixReferences } from './get_fix_references'; +import { getNoteFromDescription } from './get_note_from_description'; + +const PrNodeFragment = gql` + fragment PrNode on PullRequest { + number + url + title + bodyText + bodyHTML + mergedAt + baseRefName + state + author { + login + ... on User { + name + } + } + labels(first: 100) { + nodes { + name + } + } + } +`; + +export interface PullRequest { + number: number; + url: string; + title: string; + targetBranch: string; + mergedAt: string; + state: string; + labels: string[]; + fixes: string[]; + user: { + name: string; + login: string; + }; + versions: Version[]; + terminalLink: string; + note?: string; +} + +export class PrApi { + constructor(private readonly log: ToolingLog, private readonly token: string) {} + + async getPr(number: number) { + const resp = await this.gqlRequest( + gql` + query($number: Int!) { + repository(owner: "elastic", name: "kibana") { + pullRequest(number: $number) { + ...PrNode + } + } + } + ${PrNodeFragment} + `, + { + number, + } + ); + + const node = resp.data?.repository?.pullRequest; + if (!node) { + throw new Error(`unexpected github response, unable to fetch PR: ${inspect(resp)}`); + } + + return this.parsePullRequestNode(node); + } + + /** + * Iterate all of the PRs which have the `version` label + */ + async *iterRelevantPullRequests(version: Version) { + let nextCursor: string | undefined; + let hasNextPage = true; + + while (hasNextPage) { + const resp = await this.gqlRequest( + gql` + query($cursor: String, $labels: [String!]) { + repository(owner: "elastic", name: "kibana") { + pullRequests(first: 100, after: $cursor, labels: $labels, states: MERGED) { + pageInfo { + hasNextPage + endCursor + } + nodes { + ...PrNode + } + } + } + } + ${PrNodeFragment} + `, + { + cursor: nextCursor, + labels: [version.label], + } + ); + + const pullRequests = resp.data?.repository?.pullRequests; + if (!pullRequests) { + throw new Error(`unexpected github response, unable to fetch PRs: ${inspect(resp)}`); + } + + hasNextPage = pullRequests.pageInfo?.hasNextPage; + nextCursor = pullRequests.pageInfo?.endCursor; + + if (hasNextPage === undefined || (hasNextPage && !nextCursor)) { + throw new Error( + `github response does not include valid pagination information: ${inspect(resp)}` + ); + } + + for (const node of pullRequests.nodes) { + yield this.parsePullRequestNode(node); + } + } + } + + /** + * Convert the Github API response into the structure used by this tool + * + * @param node A GraphQL response from Github using the PrNode fragment + */ + private parsePullRequestNode(node: any): PullRequest { + const terminalLink = makeTerminalLink(`#${node.number}`, node.url); + + const labels: string[] = node.labels.nodes.map((l: { name: string }) => l.name); + + return { + number: node.number, + url: node.url, + terminalLink, + title: node.title, + targetBranch: node.baseRefName, + state: node.state, + mergedAt: node.mergedAt, + labels, + fixes: getFixReferences(node.bodyText), + user: { + login: node.author?.login || 'deleted user', + name: node.author?.name, + }, + versions: labels + .map((l) => Version.fromLabel(l)) + .filter((v): v is Version 
=> v instanceof Version), + note: getNoteFromDescription(node.bodyHTML), + }; + } + + /** + * Send a single request to the Github v4 GraphQL API + */ + private async gqlRequest(query: DocumentNode, variables: Record = {}) { + let attempt = 0; + + while (true) { + attempt += 1; + + try { + const resp = await Axios.request({ + url: 'https://api.github.com/graphql', + method: 'POST', + headers: { + 'user-agent': '@kbn/release-notes', + authorization: `bearer ${this.token}`, + }, + data: { + query: GraphqlPrinter.print(query), + variables, + }, + }); + + return resp.data; + } catch (error) { + if (!isAxiosResponseError(error) || error.response.status < 500) { + // rethrow error unless it is a 500+ response from github + throw error; + } + + const { status, data } = error.response; + const resp = inspect(data); + + if (attempt === 5) { + throw new Error( + `${status} response from Github, attempted request ${attempt} times: [${resp}]` + ); + } + + const delay = attempt * 2000; + this.log.debug(`Github responded with ${status}, retrying in ${delay} ms: [${resp}]`); + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + } + } +} diff --git a/packages/kbn-release-notes/src/lib/pull_request.ts b/packages/kbn-release-notes/src/lib/pull_request.ts deleted file mode 100644 index e61e496642062..0000000000000 --- a/packages/kbn-release-notes/src/lib/pull_request.ts +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { inspect } from 'util'; - -import Axios from 'axios'; -import gql from 'graphql-tag'; -import * as GraphqlPrinter from 'graphql/language/printer'; -import { DocumentNode } from 'graphql/language/ast'; -import makeTerminalLink from 'terminal-link'; -import { ToolingLog } from '@kbn/dev-utils'; - -import { Version } from './version'; -import { getFixReferences } from './get_fix_references'; -import { getNoteFromDescription } from './get_note_from_description'; - -const PrNodeFragment = gql` - fragment PrNode on PullRequest { - number - url - title - bodyText - bodyHTML - mergedAt - baseRefName - state - author { - login - ... 
on User { - name - } - } - labels(first: 100) { - nodes { - name - } - } - } -`; - -export interface PullRequest { - number: number; - url: string; - title: string; - targetBranch: string; - mergedAt: string; - state: string; - labels: string[]; - fixes: string[]; - user: { - name: string; - login: string; - }; - versions: Version[]; - terminalLink: string; - note?: string; -} - -/** - * Send a single request to the Github v4 GraphQL API - */ -async function gqlRequest( - token: string, - query: DocumentNode, - variables: Record = {} -) { - const resp = await Axios.request({ - url: 'https://api.github.com/graphql', - method: 'POST', - headers: { - 'user-agent': '@kbn/release-notes', - authorization: `bearer ${token}`, - }, - data: { - query: GraphqlPrinter.print(query), - variables, - }, - }); - - return resp.data; -} - -/** - * Convert the Github API response into the structure used by this tool - * - * @param node A GraphQL response from Github using the PrNode fragment - */ -function parsePullRequestNode(node: any): PullRequest { - const terminalLink = makeTerminalLink(`#${node.number}`, node.url); - - const labels: string[] = node.labels.nodes.map((l: { name: string }) => l.name); - - return { - number: node.number, - url: node.url, - terminalLink, - title: node.title, - targetBranch: node.baseRefName, - state: node.state, - mergedAt: node.mergedAt, - labels, - fixes: getFixReferences(node.bodyText), - user: { - login: node.author?.login || 'deleted user', - name: node.author?.name, - }, - versions: labels - .map((l) => Version.fromLabel(l)) - .filter((v): v is Version => v instanceof Version), - note: getNoteFromDescription(node.bodyHTML), - }; -} - -/** - * Iterate all of the PRs which have the `version` label - */ -export async function* iterRelevantPullRequests(token: string, version: Version, log: ToolingLog) { - let nextCursor: string | undefined; - let hasNextPage = true; - - while (hasNextPage) { - const resp = await gqlRequest( - token, - gql` - query($cursor: String, $labels: [String!]) { - repository(owner: "elastic", name: "kibana") { - pullRequests(first: 100, after: $cursor, labels: $labels, states: MERGED) { - pageInfo { - hasNextPage - endCursor - } - nodes { - ...PrNode - } - } - } - } - ${PrNodeFragment} - `, - { - cursor: nextCursor, - labels: [version.label], - } - ); - - const pullRequests = resp.data?.repository?.pullRequests; - if (!pullRequests) { - throw new Error(`unexpected github response, unable to fetch PRs: ${inspect(resp)}`); - } - - hasNextPage = pullRequests.pageInfo?.hasNextPage; - nextCursor = pullRequests.pageInfo?.endCursor; - - if (hasNextPage === undefined || (hasNextPage && !nextCursor)) { - throw new Error( - `github response does not include valid pagination information: ${inspect(resp)}` - ); - } - - for (const node of pullRequests.nodes) { - yield parsePullRequestNode(node); - } - } -} - -export async function getPr(token: string, number: number) { - const resp = await gqlRequest( - token, - gql` - query($number: Int!) 
{ - repository(owner: "elastic", name: "kibana") { - pullRequest(number: $number) { - ...PrNode - } - } - } - ${PrNodeFragment} - `, - { - number, - } - ); - - const node = resp.data?.repository?.pullRequest; - if (!node) { - throw new Error(`unexpected github response, unable to fetch PR: ${inspect(resp)}`); - } - - return parsePullRequestNode(node); -} diff --git a/packages/kbn-release-notes/tsconfig.json b/packages/kbn-release-notes/tsconfig.json index 6ffa64d91fba0..02209a29e5817 100644 --- a/packages/kbn-release-notes/tsconfig.json +++ b/packages/kbn-release-notes/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "./target", "declaration": true, diff --git a/packages/kbn-telemetry-tools/tsconfig.json b/packages/kbn-telemetry-tools/tsconfig.json index 13ce8ef2bad60..98512053a5c92 100644 --- a/packages/kbn-telemetry-tools/tsconfig.json +++ b/packages/kbn-telemetry-tools/tsconfig.json @@ -1,5 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-telemetry-tools" + }, "include": [ "src/**/*", ] diff --git a/packages/kbn-test-subj-selector/tsconfig.json b/packages/kbn-test-subj-selector/tsconfig.json index 3604f1004cf6c..a1e1c1af372c6 100644 --- a/packages/kbn-test-subj-selector/tsconfig.json +++ b/packages/kbn-test-subj-selector/tsconfig.json @@ -1,6 +1,9 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-test-subj-selector" + }, "include": [ "index.d.ts" - ], + ] } diff --git a/packages/kbn-test/package.json b/packages/kbn-test/package.json index 24655f8e57026..c84b0a93311bb 100644 --- a/packages/kbn-test/package.json +++ b/packages/kbn-test/package.json @@ -32,7 +32,7 @@ "parse-link-header": "^1.0.1", "rxjs": "^6.5.5", "strip-ansi": "^5.2.0", - "tar-fs": "^1.16.3", + "tar-fs": "^2.1.0", "tmp": "^0.1.0", "xml2js": "^0.4.22", "zlib": "^1.0.5" diff --git a/packages/kbn-test/src/functional_test_runner/lib/config/read_config_file.ts b/packages/kbn-test/src/functional_test_runner/lib/config/read_config_file.ts index 96fd525efa3ec..2e40aeec4f43d 100644 --- a/packages/kbn-test/src/functional_test_runner/lib/config/read_config_file.ts +++ b/packages/kbn-test/src/functional_test_runner/lib/config/read_config_file.ts @@ -21,7 +21,6 @@ import { ToolingLog } from '@kbn/dev-utils'; import { defaultsDeep } from 'lodash'; import { Config } from './config'; -import { transformDeprecations } from './transform_deprecations'; const cache = new WeakMap(); @@ -52,8 +51,7 @@ async function getSettingsFromFile(log: ToolingLog, path: string, settingOverrid await cache.get(configProvider)! ); - const logDeprecation = (error: string | Error) => log.error(error); - return transformDeprecations(settingsWithDefaults, logDeprecation); + return settingsWithDefaults; } export async function readConfigFile(log: ToolingLog, path: string, settingOverrides: any = {}) { diff --git a/packages/kbn-test/src/functional_test_runner/lib/config/transform_deprecations.ts b/packages/kbn-test/src/functional_test_runner/lib/config/transform_deprecations.ts deleted file mode 100644 index 08dfc4a4f61f4..0000000000000 --- a/packages/kbn-test/src/functional_test_runner/lib/config/transform_deprecations.ts +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// @ts-ignore -import { createTransform, Deprecations } from '../../../../../../src/legacy/deprecation'; - -type DeprecationTransformer = ( - settings: object, - log: (msg: string) => void -) => { - [key: string]: any; -}; - -export const transformDeprecations: DeprecationTransformer = createTransform([ - Deprecations.unused('servers.webdriver'), -]); diff --git a/packages/kbn-test/tsconfig.json b/packages/kbn-test/tsconfig.json index fdb53de52687b..fec35e45b2a15 100644 --- a/packages/kbn-test/tsconfig.json +++ b/packages/kbn-test/tsconfig.json @@ -1,5 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-test" + }, "include": [ "types/**/*", "src/**/*", diff --git a/packages/kbn-ui-shared-deps/entry.js b/packages/kbn-ui-shared-deps/entry.js index 0f981f3d07610..365b84b83bd5f 100644 --- a/packages/kbn-ui-shared-deps/entry.js +++ b/packages/kbn-ui-shared-deps/entry.js @@ -54,6 +54,3 @@ export const ElasticEuiChartsTheme = require('@elastic/eui/dist/eui_charts_theme import * as Theme from './theme.ts'; export { Theme }; - -// massive deps that we should really get rid of or reduce in size substantially -export const ElasticsearchBrowser = require('elasticsearch-browser/elasticsearch.js'); diff --git a/packages/kbn-ui-shared-deps/index.js b/packages/kbn-ui-shared-deps/index.js index 40e89f199b6a1..84ca3435e02bc 100644 --- a/packages/kbn-ui-shared-deps/index.js +++ b/packages/kbn-ui-shared-deps/index.js @@ -62,12 +62,5 @@ exports.externals = { '@elastic/eui/dist/eui_charts_theme': '__kbnSharedDeps__.ElasticEuiChartsTheme', '@elastic/eui/dist/eui_theme_light.json': '__kbnSharedDeps__.Theme.euiLightVars', '@elastic/eui/dist/eui_theme_dark.json': '__kbnSharedDeps__.Theme.euiDarkVars', - - /** - * massive deps that we should really get rid of or reduce in size substantially - */ - elasticsearch: '__kbnSharedDeps__.ElasticsearchBrowser', - 'elasticsearch-browser': '__kbnSharedDeps__.ElasticsearchBrowser', - 'elasticsearch-browser/elasticsearch': '__kbnSharedDeps__.ElasticsearchBrowser', }; exports.publicPathLoader = require.resolve('./public_path_loader'); diff --git a/packages/kbn-ui-shared-deps/package.json b/packages/kbn-ui-shared-deps/package.json index 531513481b1d4..bbe7b1bc2e8da 100644 --- a/packages/kbn-ui-shared-deps/package.json +++ b/packages/kbn-ui-shared-deps/package.json @@ -9,8 +9,8 @@ "kbn:watch": "node scripts/build --dev --watch" }, "dependencies": { - "@elastic/charts": "19.8.1", - "@elastic/eui": "27.4.1", + "@elastic/charts": "21.1.2", + "@elastic/eui": "28.2.0", "@elastic/numeral": "^2.5.0", "@kbn/i18n": "1.0.0", "@kbn/monaco": "1.0.0", @@ -19,7 +19,6 @@ "compression-webpack-plugin": "^4.0.0", "core-js": "^3.6.4", "custom-event-polyfill": "^0.3.0", - 
"elasticsearch-browser": "^16.7.0", "jquery": "^3.5.0", "mini-css-extract-plugin": "0.8.0", "moment": "^2.24.0", diff --git a/packages/kbn-ui-shared-deps/tsconfig.json b/packages/kbn-ui-shared-deps/tsconfig.json index cef9a442d17bc..88699027f85de 100644 --- a/packages/kbn-ui-shared-deps/tsconfig.json +++ b/packages/kbn-ui-shared-deps/tsconfig.json @@ -1,5 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-ui-shared-deps" + }, "include": [ "index.d.ts", "theme.ts" diff --git a/packages/kbn-ui-shared-deps/webpack.config.js b/packages/kbn-ui-shared-deps/webpack.config.js index c81da4689052a..fa80dfdeef20f 100644 --- a/packages/kbn-ui-shared-deps/webpack.config.js +++ b/packages/kbn-ui-shared-deps/webpack.config.js @@ -32,22 +32,10 @@ exports.getWebpackConfig = ({ dev = false } = {}) => ({ mode: dev ? 'development' : 'production', entry: { 'kbn-ui-shared-deps': './entry.js', - 'kbn-ui-shared-deps.v7.dark': [ - '@elastic/eui/dist/eui_theme_dark.css', - '@elastic/charts/dist/theme_only_dark.css', - ], - 'kbn-ui-shared-deps.v7.light': [ - '@elastic/eui/dist/eui_theme_light.css', - '@elastic/charts/dist/theme_only_light.css', - ], - 'kbn-ui-shared-deps.v8.dark': [ - '@elastic/eui/dist/eui_theme_amsterdam_dark.css', - '@elastic/charts/dist/theme_only_dark.css', - ], - 'kbn-ui-shared-deps.v8.light': [ - '@elastic/eui/dist/eui_theme_amsterdam_light.css', - '@elastic/charts/dist/theme_only_light.css', - ], + 'kbn-ui-shared-deps.v7.dark': ['@elastic/eui/dist/eui_theme_dark.css'], + 'kbn-ui-shared-deps.v7.light': ['@elastic/eui/dist/eui_theme_light.css'], + 'kbn-ui-shared-deps.v8.dark': ['@elastic/eui/dist/eui_theme_amsterdam_dark.css'], + 'kbn-ui-shared-deps.v8.light': ['@elastic/eui/dist/eui_theme_amsterdam_light.css'], }, context: __dirname, devtool: dev ? '#cheap-source-map' : false, diff --git a/packages/kbn-utility-types/tsconfig.json b/packages/kbn-utility-types/tsconfig.json index 202df37faf561..03cace5b9cb2c 100644 --- a/packages/kbn-utility-types/tsconfig.json +++ b/packages/kbn-utility-types/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../tsconfig.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "declaration": true, "declarationDir": "./target", diff --git a/rfcs/text/0010_service_status.md b/rfcs/text/0010_service_status.md index ded594930a367..76195c4f1ab89 100644 --- a/rfcs/text/0010_service_status.md +++ b/rfcs/text/0010_service_status.md @@ -137,7 +137,7 @@ interface StatusSetup { * Current status for all dependencies of the current plugin. * Each key of the `Record` is a plugin id. */ - plugins$: Observable>; + dependencies$: Observable>; /** * The status of this plugin as derived from its dependencies. diff --git a/scripts/build_ts_refs.js b/scripts/build_ts_refs.js new file mode 100644 index 0000000000000..29fd66bab4ca9 --- /dev/null +++ b/scripts/build_ts_refs.js @@ -0,0 +1,20 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +require('../src/setup_node_env'); +require('../src/dev/typescript/build_refs').runBuildRefs(); diff --git a/src/legacy/utils/binder.ts b/src/cli/cluster/binder.ts similarity index 100% rename from src/legacy/utils/binder.ts rename to src/cli/cluster/binder.ts diff --git a/src/legacy/utils/binder_for.ts b/src/cli/cluster/binder_for.ts similarity index 100% rename from src/legacy/utils/binder_for.ts rename to src/cli/cluster/binder_for.ts diff --git a/src/cli/cluster/worker.ts b/src/cli/cluster/worker.ts index 097a549187429..c8a8a067d30bf 100644 --- a/src/cli/cluster/worker.ts +++ b/src/cli/cluster/worker.ts @@ -21,7 +21,7 @@ import _ from 'lodash'; import cluster from 'cluster'; import { EventEmitter } from 'events'; -import { BinderFor } from '../../legacy/utils/binder_for'; +import { BinderFor } from './binder_for'; import { fromRoot } from '../../core/server/utils'; const cliPath = fromRoot('src/cli'); diff --git a/src/cli_keystore/add.js b/src/cli_keystore/add.js index 44737e387c2d2..232392f34c63b 100644 --- a/src/cli_keystore/add.js +++ b/src/cli_keystore/add.js @@ -18,8 +18,8 @@ */ import { Logger } from '../cli_plugin/lib/logger'; -import { confirm, question } from '../legacy/server/utils'; -import { createPromiseFromStreams, createConcatStream } from '../legacy/utils'; +import { confirm, question } from './utils'; +import { createPromiseFromStreams, createConcatStream } from '../core/server/utils'; /** * @param {Keystore} keystore diff --git a/src/cli_keystore/add.test.js b/src/cli_keystore/add.test.js index b5d5009667eb4..f1adee8879bc2 100644 --- a/src/cli_keystore/add.test.js +++ b/src/cli_keystore/add.test.js @@ -42,7 +42,7 @@ import { PassThrough } from 'stream'; import { Keystore } from '../legacy/server/keystore'; import { add } from './add'; import { Logger } from '../cli_plugin/lib/logger'; -import * as prompt from '../legacy/server/utils/prompt'; +import * as prompt from './utils/prompt'; describe('Kibana keystore', () => { describe('add', () => { diff --git a/src/cli_keystore/create.js b/src/cli_keystore/create.js index 8be1eb36882f1..55fe2c151dec0 100644 --- a/src/cli_keystore/create.js +++ b/src/cli_keystore/create.js @@ -18,7 +18,7 @@ */ import { Logger } from '../cli_plugin/lib/logger'; -import { confirm } from '../legacy/server/utils'; +import { confirm } from './utils'; export async function create(keystore, command, options) { const logger = new Logger(options); diff --git a/src/cli_keystore/create.test.js b/src/cli_keystore/create.test.js index f48b3775ddfff..cb85475eab1cb 100644 --- a/src/cli_keystore/create.test.js +++ b/src/cli_keystore/create.test.js @@ -41,7 +41,7 @@ import sinon from 'sinon'; import { Keystore } from '../legacy/server/keystore'; import { create } from './create'; import { Logger } from '../cli_plugin/lib/logger'; -import * as prompt from '../legacy/server/utils/prompt'; +import * as prompt from './utils/prompt'; describe('Kibana keystore', () => { describe('create', () => { diff --git a/src/legacy/server/utils/index.js b/src/cli_keystore/utils/index.js similarity index 100% rename from 
src/legacy/server/utils/index.js rename to src/cli_keystore/utils/index.js diff --git a/src/legacy/server/utils/prompt.js b/src/cli_keystore/utils/prompt.js similarity index 100% rename from src/legacy/server/utils/prompt.js rename to src/cli_keystore/utils/prompt.js diff --git a/src/legacy/server/utils/prompt.test.js b/src/cli_keystore/utils/prompt.test.js similarity index 100% rename from src/legacy/server/utils/prompt.test.js rename to src/cli_keystore/utils/prompt.test.js diff --git a/src/core/TESTING.md b/src/core/TESTING.md index a62922d9b5d64..a0fd0a6ffc255 100644 --- a/src/core/TESTING.md +++ b/src/core/TESTING.md @@ -330,7 +330,7 @@ Cons: To have access to Kibana TestUtils, you should create `integration_tests` folder and import `test_utils` within a test file: ```typescript // src/plugins/my_plugin/server/integration_tests/formatter.test.ts -import * as kbnTestServer from 'src/test_utils/kbn_server'; +import * as kbnTestServer from 'src/core/test_helpers/kbn_server'; describe('myPlugin', () => { describe('GET /myPlugin/formatter', () => { diff --git a/src/core/public/application/__snapshots__/application_service.test.ts.snap b/src/core/public/application/__snapshots__/application_service.test.ts.snap index c63a22170c4f6..a6c9eb27e338a 100644 --- a/src/core/public/application/__snapshots__/application_service.test.ts.snap +++ b/src/core/public/application/__snapshots__/application_service.test.ts.snap @@ -80,6 +80,7 @@ exports[`#start() getComponent returns renderable JSX tree 1`] = ` } } mounters={Map {}} + setAppActionMenu={[Function]} setAppLeaveHandler={[Function]} setIsMounting={[Function]} /> diff --git a/src/core/public/application/application_service.mock.ts b/src/core/public/application/application_service.mock.ts index 47a8a01d917eb..5609e7eb5d17d 100644 --- a/src/core/public/application/application_service.mock.ts +++ b/src/core/public/application/application_service.mock.ts @@ -20,6 +20,7 @@ import { History } from 'history'; import { BehaviorSubject, Subject } from 'rxjs'; +import type { MountPoint } from '../types'; import { capabilitiesServiceMock } from './capabilities/capabilities_service.mock'; import { ApplicationSetup, @@ -27,7 +28,6 @@ import { ApplicationStart, InternalApplicationSetup, PublicAppInfo, - PublicLegacyAppInfo, } from './types'; import { ApplicationServiceContract } from './test_types'; @@ -39,7 +39,6 @@ const createSetupContractMock = (): jest.Mocked => ({ const createInternalSetupContractMock = (): jest.Mocked => ({ register: jest.fn(), - registerLegacyApp: jest.fn(), registerAppUpdater: jest.fn(), registerMountContext: jest.fn(), }); @@ -48,7 +47,7 @@ const createStartContractMock = (): jest.Mocked => { const currentAppId$ = new Subject(); return { - applications$: new BehaviorSubject>(new Map()), + applications$: new BehaviorSubject>(new Map()), currentAppId$: currentAppId$.asObservable(), capabilities: capabilitiesServiceMock.createStartContract().capabilities, navigateToApp: jest.fn(), @@ -84,9 +83,10 @@ const createInternalStartContractMock = (): jest.Mocked(); return { - applications$: new BehaviorSubject>(new Map()), + applications$: new BehaviorSubject>(new Map()), capabilities: capabilitiesServiceMock.createStartContract().capabilities, currentAppId$: currentAppId$.asObservable(), + currentActionMenu$: new BehaviorSubject(undefined), getComponent: jest.fn(), getUrlForApp: jest.fn(), navigateToApp: jest.fn().mockImplementation((appId) => currentAppId$.next(appId)), diff --git a/src/core/public/application/application_service.test.ts 
b/src/core/public/application/application_service.test.ts index d0c2ac111eb1f..afcebc06506c2 100644 --- a/src/core/public/application/application_service.test.ts +++ b/src/core/public/application/application_service.test.ts @@ -28,21 +28,12 @@ import { BehaviorSubject, Subject } from 'rxjs'; import { bufferCount, take, takeUntil } from 'rxjs/operators'; import { shallow, mount } from 'enzyme'; -import { injectedMetadataServiceMock } from '../injected_metadata/injected_metadata_service.mock'; import { contextServiceMock } from '../context/context_service.mock'; import { httpServiceMock } from '../http/http_service.mock'; import { overlayServiceMock } from '../overlays/overlay_service.mock'; import { MockLifecycle } from './test_types'; import { ApplicationService } from './application_service'; -import { - App, - PublicAppInfo, - AppNavLinkStatus, - AppStatus, - AppUpdater, - LegacyApp, - PublicLegacyAppInfo, -} from './types'; +import { App, PublicAppInfo, AppNavLinkStatus, AppStatus, AppUpdater } from './types'; import { act } from 'react-dom/test-utils'; const createApp = (props: Partial): App => { @@ -54,15 +45,6 @@ const createApp = (props: Partial): App => { }; }; -const createLegacyApp = (props: Partial): LegacyApp => { - return { - id: 'some-id', - title: 'some-title', - appUrl: '/my-url', - ...props, - }; -}; - let setupDeps: MockLifecycle<'setup'>; let startDeps: MockLifecycle<'start'>; let service: ApplicationService; @@ -73,10 +55,8 @@ describe('#setup()', () => { setupDeps = { http, context: contextServiceMock.createSetupContract(), - injectedMetadata: injectedMetadataServiceMock.createSetupContract(), redirectTo: jest.fn(), }; - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(false); startDeps = { http, overlays: overlayServiceMock.createStartContract() }; service = new ApplicationService(); }); @@ -116,7 +96,6 @@ describe('#setup()', () => { expect(applications.get('app1')).toEqual( expect.objectContaining({ id: 'app1', - legacy: false, navLinkStatus: AppNavLinkStatus.visible, status: AppStatus.accessible, }) @@ -124,7 +103,6 @@ describe('#setup()', () => { expect(applications.get('app2')).toEqual( expect.objectContaining({ id: 'app2', - legacy: false, navLinkStatus: AppNavLinkStatus.visible, status: AppStatus.accessible, }) @@ -141,7 +119,6 @@ describe('#setup()', () => { expect(applications.get('app1')).toEqual( expect.objectContaining({ id: 'app1', - legacy: false, navLinkStatus: AppNavLinkStatus.hidden, status: AppStatus.inaccessible, defaultPath: 'foo/bar', @@ -151,7 +128,6 @@ describe('#setup()', () => { expect(applications.get('app2')).toEqual( expect.objectContaining({ id: 'app2', - legacy: false, navLinkStatus: AppNavLinkStatus.visible, status: AppStatus.accessible, }) @@ -159,7 +135,7 @@ describe('#setup()', () => { }); it('throws an error if an App with the same appRoute is registered', () => { - const { register, registerLegacyApp } = service.setup(setupDeps); + const { register } = service.setup(setupDeps); register(Symbol(), createApp({ id: 'app1' })); @@ -168,7 +144,6 @@ describe('#setup()', () => { ).toThrowErrorMatchingInlineSnapshot( `"An application is already registered with the appRoute \\"/app/app1\\""` ); - expect(() => registerLegacyApp(createLegacyApp({ id: 'app1' }))).toThrow(); register(Symbol(), createApp({ id: 'app-next', appRoute: '/app/app3' })); @@ -177,7 +152,6 @@ describe('#setup()', () => { ).toThrowErrorMatchingInlineSnapshot( `"An application is already registered with the appRoute \\"/app/app3\\""` ); - expect(() => 
registerLegacyApp(createLegacyApp({ id: 'app3' }))).not.toThrow(); }); it('throws an error if an App starts with the HTTP base path', () => { @@ -195,41 +169,6 @@ describe('#setup()', () => { }); }); - describe('registerLegacyApp', () => { - it('throws an error if two apps with the same id are registered', () => { - const { registerLegacyApp } = service.setup(setupDeps); - - registerLegacyApp(createLegacyApp({ id: 'app2' })); - expect(() => - registerLegacyApp(createLegacyApp({ id: 'app2' })) - ).toThrowErrorMatchingInlineSnapshot( - `"An application is already registered with the id \\"app2\\""` - ); - }); - - it('throws error if additional apps are registered after setup', async () => { - const { registerLegacyApp } = service.setup(setupDeps); - - await service.start(startDeps); - expect(() => - registerLegacyApp(createLegacyApp({ id: 'app2' })) - ).toThrowErrorMatchingInlineSnapshot(`"Applications cannot be registered after \\"setup\\""`); - }); - - it('throws an error if a LegacyApp with the same appRoute is registered', () => { - const { register, registerLegacyApp } = service.setup(setupDeps); - - registerLegacyApp(createLegacyApp({ id: 'app1' })); - - expect(() => - register(Symbol(), createApp({ id: 'app2', appRoute: '/app/app1' })) - ).toThrowErrorMatchingInlineSnapshot( - `"An application is already registered with the appRoute \\"/app/app1\\""` - ); - expect(() => registerLegacyApp(createLegacyApp({ id: 'app1:other' }))).not.toThrow(); - }); - }); - describe('registerAppUpdater', () => { it('updates status fields', async () => { const setup = service.setup(setupDeps); @@ -258,7 +197,6 @@ describe('#setup()', () => { expect(applications.get('app1')).toEqual( expect.objectContaining({ id: 'app1', - legacy: false, navLinkStatus: AppNavLinkStatus.disabled, status: AppStatus.inaccessible, tooltip: 'App inaccessible due to reason', @@ -267,7 +205,6 @@ describe('#setup()', () => { expect(applications.get('app2')).toEqual( expect.objectContaining({ id: 'app2', - legacy: false, navLinkStatus: AppNavLinkStatus.visible, status: AppStatus.accessible, tooltip: 'App accessible', @@ -307,7 +244,6 @@ describe('#setup()', () => { expect(applications.get('app1')).toEqual( expect.objectContaining({ id: 'app1', - legacy: false, navLinkStatus: AppNavLinkStatus.disabled, status: AppStatus.inaccessible, tooltip: 'App inaccessible due to reason', @@ -316,7 +252,6 @@ describe('#setup()', () => { expect(applications.get('app2')).toEqual( expect.objectContaining({ id: 'app2', - legacy: false, status: AppStatus.inaccessible, navLinkStatus: AppNavLinkStatus.hidden, }) @@ -352,7 +287,6 @@ describe('#setup()', () => { expect(applications.get('app1')).toEqual( expect.objectContaining({ id: 'app1', - legacy: false, navLinkStatus: AppNavLinkStatus.disabled, status: AppStatus.inaccessible, }) @@ -374,10 +308,7 @@ describe('#setup()', () => { setup.registerAppUpdater(statusUpdater); const start = await service.start(startDeps); - let latestValue: ReadonlyMap = new Map< - string, - PublicAppInfo | PublicLegacyAppInfo - >(); + let latestValue: ReadonlyMap = new Map(); start.applications$.subscribe((apps) => { latestValue = apps; }); @@ -385,7 +316,6 @@ describe('#setup()', () => { expect(latestValue.get('app1')).toEqual( expect.objectContaining({ id: 'app1', - legacy: false, status: AppStatus.inaccessible, navLinkStatus: AppNavLinkStatus.disabled, }) @@ -401,43 +331,12 @@ describe('#setup()', () => { expect(latestValue.get('app1')).toEqual( expect.objectContaining({ id: 'app1', - legacy: false, status: 
AppStatus.accessible, navLinkStatus: AppNavLinkStatus.hidden, }) ); }); - it('also updates legacy apps', async () => { - const setup = service.setup(setupDeps); - - setup.registerLegacyApp(createLegacyApp({ id: 'app1' })); - - setup.registerAppUpdater( - new BehaviorSubject((app) => { - return { - status: AppStatus.inaccessible, - navLinkStatus: AppNavLinkStatus.hidden, - tooltip: 'App inaccessible due to reason', - }; - }) - ); - - const start = await service.start(startDeps); - const applications = await start.applications$.pipe(take(1)).toPromise(); - - expect(applications.size).toEqual(1); - expect(applications.get('app1')).toEqual( - expect.objectContaining({ - id: 'app1', - legacy: true, - status: AppStatus.inaccessible, - navLinkStatus: AppNavLinkStatus.hidden, - tooltip: 'App inaccessible due to reason', - }) - ); - }); - it('allows to update the basePath', async () => { const setup = service.setup(setupDeps); @@ -486,10 +385,8 @@ describe('#start()', () => { setupDeps = { http, context: contextServiceMock.createSetupContract(), - injectedMetadata: injectedMetadataServiceMock.createSetupContract(), redirectTo: jest.fn(), }; - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(false); startDeps = { http, overlays: overlayServiceMock.createStartContract() }; service = new ApplicationService(); }); @@ -507,11 +404,10 @@ describe('#start()', () => { }); it('exposes available apps', async () => { - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); - const { register, registerLegacyApp } = service.setup(setupDeps); + const { register } = service.setup(setupDeps); register(Symbol(), createApp({ id: 'app1' })); - registerLegacyApp(createLegacyApp({ id: 'app2' })); + register(Symbol(), createApp({ id: 'app2' })); const { applications$ } = await service.start(startDeps); const availableApps = await applications$.pipe(take(1)).toPromise(); @@ -522,16 +418,14 @@ describe('#start()', () => { expect.objectContaining({ appRoute: '/app/app1', id: 'app1', - legacy: false, navLinkStatus: AppNavLinkStatus.visible, status: AppStatus.accessible, }) ); expect(availableApps.get('app2')).toEqual( expect.objectContaining({ - appUrl: '/my-url', + appRoute: '/app/app2', id: 'app2', - legacy: true, navLinkStatus: AppNavLinkStatus.visible, status: AppStatus.accessible, }) @@ -558,39 +452,19 @@ describe('#start()', () => { navLinks: { app1: true, app2: false, - legacyApp1: true, - legacyApp2: false, }, }, } as any); - const { register, registerLegacyApp } = service.setup(setupDeps); + const { register } = service.setup(setupDeps); register(Symbol(), createApp({ id: 'app1' })); - registerLegacyApp(createLegacyApp({ id: 'legacyApp1' })); register(Symbol(), createApp({ id: 'app2' })); - registerLegacyApp(createLegacyApp({ id: 'legacyApp2' })); const { applications$ } = await service.start(startDeps); const availableApps = await applications$.pipe(take(1)).toPromise(); - expect([...availableApps.keys()]).toEqual(['app1', 'legacyApp1']); - }); - - describe('currentAppId$', () => { - it('emits the legacy app id when in legacy mode', async () => { - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); - setupDeps.injectedMetadata.getLegacyMetadata.mockReturnValue({ - app: { - id: 'legacy', - title: 'Legacy App', - }, - } as any); - await service.setup(setupDeps); - const { currentAppId$ } = await service.start(startDeps); - - expect(await currentAppId$.pipe(take(1)).toPromise()).toEqual('legacy'); - }); + expect([...availableApps.keys()]).toEqual(['app1']); }); describe('getComponent', 
() => { @@ -602,16 +476,6 @@ describe('#start()', () => { expect(() => shallow(createElement(getComponent))).not.toThrow(); expect(getComponent()).toMatchSnapshot(); }); - - it('renders null when in legacy mode', async () => { - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); - service.setup(setupDeps); - - const { getComponent } = await service.start(startDeps); - - expect(() => shallow(createElement(getComponent))).not.toThrow(); - expect(getComponent()).toBe(null); - }); }); describe('getUrlForApp', () => { @@ -624,16 +488,14 @@ describe('#start()', () => { }); it('creates URL for registered appId', async () => { - const { register, registerLegacyApp } = service.setup(setupDeps); + const { register } = service.setup(setupDeps); register(Symbol(), createApp({ id: 'app1' })); - registerLegacyApp(createLegacyApp({ id: 'legacyApp1' })); register(Symbol(), createApp({ id: 'app2', appRoute: '/custom/path' })); const { getUrlForApp } = await service.start(startDeps); expect(getUrlForApp('app1')).toBe('/base-path/app/app1'); - expect(getUrlForApp('legacyApp1')).toBe('/base-path/app/legacyApp1'); expect(getUrlForApp('app2')).toBe('/base-path/custom/path'); }); @@ -800,16 +662,6 @@ describe('#start()', () => { expect(MockHistory.push).toHaveBeenCalledWith('/custom/path', 'my-state'); }); - it('redirects when in legacyMode', async () => { - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); - service.setup(setupDeps); - - const { navigateToApp } = await service.start(startDeps); - - await navigateToApp('myTestApp'); - expect(setupDeps.redirectTo).toHaveBeenCalledWith('/base-path/app/myTestApp'); - }); - it('updates currentApp$ after mounting', async () => { service.setup(setupDeps); @@ -903,31 +755,6 @@ describe('#start()', () => { `); }); - it('sets window.location.href when navigating to legacy apps', async () => { - setupDeps.http = httpServiceMock.createSetupContract({ basePath: '/test' }); - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); - service.setup(setupDeps); - - const { navigateToApp } = await service.start(startDeps); - - await navigateToApp('alpha'); - expect(setupDeps.redirectTo).toHaveBeenCalledWith('/test/app/alpha'); - }); - - it('handles legacy apps with subapps', async () => { - setupDeps.http = httpServiceMock.createSetupContract({ basePath: '/test' }); - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); - - const { registerLegacyApp } = service.setup(setupDeps); - - registerLegacyApp(createLegacyApp({ id: 'baseApp:legacyApp1' })); - - const { navigateToApp } = await service.start(startDeps); - - await navigateToApp('baseApp:legacyApp1'); - expect(setupDeps.redirectTo).toHaveBeenCalledWith('/test/app/baseApp'); - }); - describe('when `replace` option is true', () => { it('use `history.replace` instead of `history.push`', async () => { service.setup(setupDeps); @@ -973,16 +800,6 @@ describe('#start()', () => { undefined ); }); - it('do not change the behavior when in legacy mode', async () => { - setupDeps.http = httpServiceMock.createSetupContract({ basePath: '/test' }); - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); - service.setup(setupDeps); - - const { navigateToApp } = await service.start(startDeps); - - await navigateToApp('alpha', { replace: true }); - expect(setupDeps.redirectTo).toHaveBeenCalledWith('/test/app/alpha'); - }); }); describe('when `replace` option is false', () => { @@ -1040,9 +857,7 @@ describe('#stop()', () => { setupDeps = { http, context: 
contextServiceMock.createSetupContract(), - injectedMetadata: injectedMetadataServiceMock.createSetupContract(), }; - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(false); startDeps = { http, overlays: overlayServiceMock.createStartContract() }; service = new ApplicationService(); }); diff --git a/src/core/public/application/application_service.tsx b/src/core/public/application/application_service.tsx index d7f15decb255d..0d08f6f3007b0 100644 --- a/src/core/public/application/application_service.tsx +++ b/src/core/public/application/application_service.tsx @@ -22,7 +22,7 @@ import { BehaviorSubject, Observable, Subject, Subscription } from 'rxjs'; import { map, shareReplay, takeUntil, distinctUntilChanged, filter } from 'rxjs/operators'; import { createBrowserHistory, History } from 'history'; -import { InjectedMetadataSetup } from '../injected_metadata'; +import { MountPoint } from '../types'; import { HttpSetup, HttpStart } from '../http'; import { OverlayStart } from '../overlays'; import { ContextSetup, IContextContainer } from '../context'; @@ -31,7 +31,6 @@ import { AppRouter } from './ui'; import { Capabilities, CapabilitiesService } from './capabilities'; import { App, - AppBase, AppLeaveHandler, AppMount, AppMountDeprecated, @@ -41,8 +40,6 @@ import { AppUpdater, InternalApplicationSetup, InternalApplicationStart, - LegacyApp, - LegacyAppMounter, Mounter, NavigateToAppOptions, } from './types'; @@ -52,9 +49,8 @@ import { appendAppPath, parseAppUrl, relativeToAbsolute, getAppInfo } from './ut interface SetupDeps { context: ContextSetup; http: HttpSetup; - injectedMetadata: InjectedMetadataSetup; history?: History; - /** Used to redirect to external urls (and legacy apps) */ + /** Used to redirect to external urls */ redirectTo?: (path: string) => void; } @@ -90,16 +86,22 @@ interface AppUpdaterWrapper { updater: AppUpdater; } +interface AppInternalState { + leaveHandler?: AppLeaveHandler; + actionMenu?: MountPoint; +} + /** * Service that is responsible for registering new applications. * @internal */ export class ApplicationService { - private readonly apps = new Map | LegacyApp>(); + private readonly apps = new Map>(); private readonly mounters = new Map(); private readonly capabilities = new CapabilitiesService(); - private readonly appLeaveHandlers = new Map(); + private readonly appInternalStates = new Map(); private currentAppId$ = new BehaviorSubject(undefined); + private currentActionMenu$ = new BehaviorSubject(undefined); private readonly statusUpdaters$ = new BehaviorSubject>(new Map()); private readonly subscriptions: Subscription[] = []; private stop$ = new Subject(); @@ -112,28 +114,17 @@ export class ApplicationService { public setup({ context, http: { basePath }, - injectedMetadata, redirectTo = (path: string) => { window.location.assign(path); }, history, }: SetupDeps): InternalApplicationSetup { const basename = basePath.get(); - if (injectedMetadata.getLegacyMode()) { - this.currentAppId$.next(injectedMetadata.getLegacyMetadata().app.id); - } else { - // Only setup history if we're not in legacy mode - this.history = history || createBrowserHistory({ basename }); - } + this.history = history || createBrowserHistory({ basename }); this.navigate = (url, state, replace) => { - if (this.history) { - // basePath not needed here because `history` is configured with basename - return replace ? 
this.history.replace(url, state) : this.history.push(url, state); - } else { - // If we do not have history available (legacy mode), use redirectTo to do a full page refresh. - return redirectTo(basePath.prepend(url)); - } + // basePath not needed here because `history` is configured with basename + return replace ? this.history!.replace(url, state) : this.history!.push(url, state); }; this.redirectTo = redirectTo; @@ -193,7 +184,6 @@ export class ApplicationService { ...appProps, status: app.status ?? AppStatus.accessible, navLinkStatus: app.navLinkStatus ?? AppNavLinkStatus.default, - legacy: false, }); if (updater$) { registerStatusUpdater(app.id, updater$); @@ -204,43 +194,6 @@ export class ApplicationService { exactRoute: app.exactRoute ?? false, mount: wrapMount(plugin, app), unmountBeforeMounting: false, - legacy: false, - }); - }, - registerLegacyApp: (app) => { - const appRoute = `/app/${app.id.split(':')[0]}`; - - if (this.registrationClosed) { - throw new Error('Applications cannot be registered after "setup"'); - } else if (this.apps.has(app.id)) { - throw new Error(`An application is already registered with the id "${app.id}"`); - } else if (basename && appRoute!.startsWith(`${basename}/`)) { - throw new Error('Cannot register an application route that includes HTTP base path'); - } - - const appBasePath = basePath.prepend(appRoute); - const mount: LegacyAppMounter = ({ history: appHistory }) => { - redirectTo(appHistory.createHref(appHistory.location)); - window.location.reload(); - }; - - const { updater$, ...appProps } = app; - this.apps.set(app.id, { - ...appProps, - status: app.status ?? AppStatus.accessible, - navLinkStatus: app.navLinkStatus ?? AppNavLinkStatus.default, - legacy: true, - }); - if (updater$) { - registerStatusUpdater(app.id, updater$); - } - this.mounters.set(app.id, { - appRoute, - appBasePath, - exactRoute: false, - mount, - unmountBeforeMounting: true, - legacy: true, }); }, registerAppUpdater: (appUpdater$: Observable) => @@ -293,12 +246,14 @@ export class ApplicationService { if (path === undefined) { path = applications$.value.get(appId)?.defaultPath; } - this.appLeaveHandlers.delete(this.currentAppId$.value!); + this.appInternalStates.delete(this.currentAppId$.value!); this.navigate!(getAppUrl(availableMounters, appId, path), state, replace); this.currentAppId$.next(appId); } }; + this.currentAppId$.subscribe(() => this.refreshCurrentActionMenu()); + return { applications$: applications$.pipe( map((apps) => new Map([...apps.entries()].map(([id, app]) => [id, getAppInfo(app)]))), @@ -310,7 +265,11 @@ export class ApplicationService { distinctUntilChanged(), takeUntil(this.stop$) ), - history: this.history, + currentActionMenu$: this.currentActionMenu$.pipe( + distinctUntilChanged(), + takeUntil(this.stop$) + ), + history: this.history!, registerMountContext: this.mountContext.registerContext, getUrlForApp: ( appId, @@ -338,6 +297,7 @@ export class ApplicationService { mounters={availableMounters} appStatuses$={applicationStatuses$} setAppLeaveHandler={this.setAppLeaveHandler} + setAppActionMenu={this.setAppActionMenu} setIsMounting={(isMounting) => httpLoadingCount$.next(isMounting ? 1 : 0)} /> ); @@ -346,7 +306,24 @@ export class ApplicationService { } private setAppLeaveHandler = (appId: string, handler: AppLeaveHandler) => { - this.appLeaveHandlers.set(appId, handler); + this.appInternalStates.set(appId, { + ...(this.appInternalStates.get(appId) ?? 
{}), + leaveHandler: handler, + }); + }; + + private setAppActionMenu = (appId: string, mount: MountPoint | undefined) => { + this.appInternalStates.set(appId, { + ...(this.appInternalStates.get(appId) ?? {}), + actionMenu: mount, + }); + this.refreshCurrentActionMenu(); + }; + + private refreshCurrentActionMenu = () => { + const appId = this.currentAppId$.getValue(); + const currentActionMenu = appId ? this.appInternalStates.get(appId)?.actionMenu : undefined; + this.currentActionMenu$.next(currentActionMenu); }; private async shouldNavigate(overlays: OverlayStart): Promise { @@ -354,7 +331,7 @@ export class ApplicationService { if (currentAppId === undefined) { return true; } - const action = getLeaveAction(this.appLeaveHandlers.get(currentAppId)); + const action = getLeaveAction(this.appInternalStates.get(currentAppId)?.leaveHandler); if (isConfirmAction(action)) { const confirmed = await overlays.openConfirm(action.text, { title: action.title, @@ -372,7 +349,7 @@ export class ApplicationService { if (currentAppId === undefined) { return; } - const action = getLeaveAction(this.appLeaveHandlers.get(currentAppId)); + const action = getLeaveAction(this.appInternalStates.get(currentAppId)?.leaveHandler); if (isConfirmAction(action)) { event.preventDefault(); // some browsers accept a string return value being the message displayed @@ -383,13 +360,14 @@ export class ApplicationService { public stop() { this.stop$.next(); this.currentAppId$.complete(); + this.currentActionMenu$.complete(); this.statusUpdaters$.complete(); this.subscriptions.forEach((sub) => sub.unsubscribe()); window.removeEventListener('beforeunload', this.onBeforeUnload); } } -const updateStatus = (app: T, statusUpdaters: AppUpdaterWrapper[]): T => { +const updateStatus = (app: App, statusUpdaters: AppUpdaterWrapper[]): App => { let changes: Partial = {}; statusUpdaters.forEach((wrapper) => { if (wrapper.application !== allApplicationsFilter && wrapper.application !== app.id) { diff --git a/src/core/public/application/index.ts b/src/core/public/application/index.ts index 121f0c7ac07d6..4f3b113a29c9b 100644 --- a/src/core/public/application/index.ts +++ b/src/core/public/application/index.ts @@ -22,7 +22,6 @@ export { Capabilities } from './capabilities'; export { ScopedHistory } from './scoped_history'; export { App, - AppBase, AppMount, AppMountDeprecated, AppUnmount, @@ -39,10 +38,8 @@ export { AppLeaveAction, AppLeaveDefaultAction, AppLeaveConfirmAction, - LegacyApp, NavigateToAppOptions, PublicAppInfo, - PublicLegacyAppInfo, // Internal types InternalApplicationSetup, InternalApplicationStart, diff --git a/src/core/public/application/integration_tests/application_service.test.tsx b/src/core/public/application/integration_tests/application_service.test.tsx index b0419d276dfa1..d28486928b7e2 100644 --- a/src/core/public/application/integration_tests/application_service.test.tsx +++ b/src/core/public/application/integration_tests/application_service.test.tsx @@ -25,11 +25,11 @@ import { createRenderer } from './utils'; import { ApplicationService } from '../application_service'; import { httpServiceMock } from '../../http/http_service.mock'; import { contextServiceMock } from '../../context/context_service.mock'; -import { injectedMetadataServiceMock } from '../../injected_metadata/injected_metadata_service.mock'; import { MockLifecycle } from '../test_types'; import { overlayServiceMock } from '../../overlays/overlay_service.mock'; import { AppMountParameters } from '../types'; -import { ScopedHistory } from 
'../scoped_history'; +import { Observable } from 'rxjs'; +import { MountPoint } from 'kibana/public'; const flushPromises = () => new Promise((resolve) => setImmediate(resolve)); @@ -54,10 +54,8 @@ describe('ApplicationService', () => { setupDeps = { http, context: contextServiceMock.createSetupContract(), - injectedMetadata: injectedMetadataServiceMock.createSetupContract(), history: history as any, }; - setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(false); startDeps = { http, overlays: overlayServiceMock.createStartContract() }; service = new ApplicationService(); }); @@ -147,54 +145,6 @@ describe('ApplicationService', () => { }); }); - describe('redirects', () => { - beforeAll(() => { - Object.defineProperty(window, 'location', { - value: { - reload: jest.fn(), - }, - }); - }); - - it('to full path when navigating to legacy app', async () => { - const redirectTo = jest.fn(); - - // In the real application, we use a BrowserHistory instance configured with `basename`. However, in tests we must - // use MemoryHistory which does not support `basename`. In order to emulate this behavior, we will wrap this - // instance with a ScopedHistory configured with a basepath. - history.push(setupDeps.http.basePath.get()); // ScopedHistory constructor will fail if underlying history is not currently at basePath. - const { register, registerLegacyApp } = service.setup({ - ...setupDeps, - redirectTo, - history: new ScopedHistory(history, setupDeps.http.basePath.get()), - }); - - register(Symbol(), { - id: 'app1', - title: 'App1', - mount: ({ onAppLeave }: AppMountParameters) => { - onAppLeave((actions) => actions.default()); - return () => undefined; - }, - }); - registerLegacyApp({ - id: 'myLegacyTestApp', - appUrl: '/app/myLegacyTestApp', - title: 'My Legacy Test App', - }); - - const { navigateToApp, getComponent } = await service.start(startDeps); - - update = createRenderer(getComponent()); - - await navigate('/test/app/app1'); - await act(() => navigateToApp('myLegacyTestApp', { path: '#/some-path' })); - - expect(redirectTo).toHaveBeenCalledWith('/test/app/myLegacyTestApp#/some-path'); - expect(window.location.reload).toHaveBeenCalled(); - }); - }); - describe('leaving an application that registered an app leave handler', () => { it('navigates to the new app if action is default', async () => { startDeps.overlays.openConfirm.mockResolvedValue(true); @@ -309,4 +259,189 @@ describe('ApplicationService', () => { expect(history.entries[1].pathname).toEqual('/app/app1'); }); }); + + describe('registering action menus', () => { + const getValue = (obs: Observable): Promise => { + return obs.pipe(take(1)).toPromise(); + }; + + const mounter1: MountPoint = () => () => undefined; + const mounter2: MountPoint = () => () => undefined; + + it('updates the observable value when an application is mounted', async () => { + const { register } = service.setup(setupDeps); + + register(Symbol(), { + id: 'app1', + title: 'App1', + mount: async ({ setHeaderActionMenu }: AppMountParameters) => { + setHeaderActionMenu(mounter1); + return () => undefined; + }, + }); + + const { navigateToApp, getComponent, currentActionMenu$ } = await service.start(startDeps); + update = createRenderer(getComponent()); + + expect(await getValue(currentActionMenu$)).toBeUndefined(); + + await act(async () => { + await navigateToApp('app1'); + await flushPromises(); + }); + + expect(await getValue(currentActionMenu$)).toBe(mounter1); + }); + + it('updates the observable value when switching application', async () => { + const { 
register } = service.setup(setupDeps); + + register(Symbol(), { + id: 'app1', + title: 'App1', + mount: async ({ setHeaderActionMenu }: AppMountParameters) => { + setHeaderActionMenu(mounter1); + return () => undefined; + }, + }); + register(Symbol(), { + id: 'app2', + title: 'App2', + mount: async ({ setHeaderActionMenu }: AppMountParameters) => { + setHeaderActionMenu(mounter2); + return () => undefined; + }, + }); + + const { navigateToApp, getComponent, currentActionMenu$ } = await service.start(startDeps); + update = createRenderer(getComponent()); + + await act(async () => { + await navigateToApp('app1'); + await flushPromises(); + }); + + expect(await getValue(currentActionMenu$)).toBe(mounter1); + + await act(async () => { + await navigateToApp('app2'); + await flushPromises(); + }); + + expect(await getValue(currentActionMenu$)).toBe(mounter2); + }); + + it('updates the observable value to undefined when switching to an application without action menu', async () => { + const { register } = service.setup(setupDeps); + + register(Symbol(), { + id: 'app1', + title: 'App1', + mount: async ({ setHeaderActionMenu }: AppMountParameters) => { + setHeaderActionMenu(mounter1); + return () => undefined; + }, + }); + register(Symbol(), { + id: 'app2', + title: 'App2', + mount: async ({}: AppMountParameters) => { + return () => undefined; + }, + }); + + const { navigateToApp, getComponent, currentActionMenu$ } = await service.start(startDeps); + update = createRenderer(getComponent()); + + await act(async () => { + await navigateToApp('app1'); + await flushPromises(); + }); + + expect(await getValue(currentActionMenu$)).toBe(mounter1); + + await act(async () => { + await navigateToApp('app2'); + await flushPromises(); + }); + + expect(await getValue(currentActionMenu$)).toBeUndefined(); + }); + + it('allow applications to call `setHeaderActionMenu` multiple times', async () => { + const { register } = service.setup(setupDeps); + + let resolveMount: () => void; + const promise = new Promise((resolve) => { + resolveMount = resolve; + }); + + register(Symbol(), { + id: 'app1', + title: 'App1', + mount: async ({ setHeaderActionMenu }: AppMountParameters) => { + setHeaderActionMenu(mounter1); + promise.then(() => { + setHeaderActionMenu(mounter2); + }); + return () => undefined; + }, + }); + + const { navigateToApp, getComponent, currentActionMenu$ } = await service.start(startDeps); + update = createRenderer(getComponent()); + + await act(async () => { + await navigateToApp('app1'); + await flushPromises(); + }); + + expect(await getValue(currentActionMenu$)).toBe(mounter1); + + await act(async () => { + resolveMount(); + await flushPromises(); + }); + + expect(await getValue(currentActionMenu$)).toBe(mounter2); + }); + + it('allow applications to unset the current menu', async () => { + const { register } = service.setup(setupDeps); + + let resolveMount: () => void; + const promise = new Promise((resolve) => { + resolveMount = resolve; + }); + + register(Symbol(), { + id: 'app1', + title: 'App1', + mount: async ({ setHeaderActionMenu }: AppMountParameters) => { + setHeaderActionMenu(mounter1); + promise.then(() => { + setHeaderActionMenu(undefined); + }); + return () => undefined; + }, + }); + + const { navigateToApp, getComponent, currentActionMenu$ } = await service.start(startDeps); + update = createRenderer(getComponent()); + + await act(async () => { + await navigateToApp('app1'); + await flushPromises(); + }); + + expect(await getValue(currentActionMenu$)).toBe(mounter1); + + await 
act(async () => { + resolveMount(); + await flushPromises(); + }); + + expect(await getValue(currentActionMenu$)).toBeUndefined(); + }); + }); }); diff --git a/src/core/public/application/integration_tests/router.test.tsx b/src/core/public/application/integration_tests/router.test.tsx index f992e121437a9..e3f992990f9f9 100644 --- a/src/core/public/application/integration_tests/router.test.tsx +++ b/src/core/public/application/integration_tests/router.test.tsx @@ -22,13 +22,12 @@ import { BehaviorSubject } from 'rxjs'; import { createMemoryHistory, History, createHashHistory } from 'history'; import { AppRouter, AppNotFound } from '../ui'; -import { EitherApp, MockedMounterMap, MockedMounterTuple } from '../test_types'; -import { createRenderer, createAppMounter, createLegacyAppMounter, getUnmounter } from './utils'; +import { MockedMounterMap, MockedMounterTuple } from '../test_types'; +import { createRenderer, createAppMounter, getUnmounter } from './utils'; import { AppStatus } from '../types'; -import { ScopedHistory } from '../scoped_history'; describe('AppRouter', () => { - let mounters: MockedMounterMap; + let mounters: MockedMounterMap; let globalHistory: History; let update: ReturnType; let scopedAppHistory: History; @@ -59,6 +58,7 @@ describe('AppRouter', () => { mounters={mockMountersToMounters()} appStatuses$={mountersToAppStatus$()} setAppLeaveHandler={noop} + setAppActionMenu={noop} setIsMounting={noop} /> ); @@ -66,9 +66,7 @@ describe('AppRouter', () => { beforeEach(() => { mounters = new Map([ createAppMounter({ appId: 'app1', html: 'App 1' }), - createLegacyAppMounter('legacyApp1', jest.fn()), createAppMounter({ appId: 'app2', html: '
<div>App 2</div>
' }), - createLegacyAppMounter('baseApp:legacyApp2', jest.fn()), createAppMounter({ appId: 'app3', html: '
<div>Chromeless A</div>
', @@ -80,7 +78,6 @@ describe('AppRouter', () => { appRoute: '/chromeless-b/path', }), createAppMounter({ appId: 'disabledApp', html: '
<div>Disabled app</div>
' }), - createLegacyAppMounter('disabledLegacyApp', jest.fn()), createAppMounter({ appId: 'scopedApp', extraMountHook: ({ history }) => { @@ -98,7 +95,7 @@ describe('AppRouter', () => { html: '
<div>App 6</div>
', appRoute: '/app/my-app/app6', }), - ] as Array>); + ] as MockedMounterTuple[]); globalHistory = createMemoryHistory(); update = createMountersRenderer(); }); @@ -383,26 +380,6 @@ describe('AppRouter', () => { expect(globalHistory.location.pathname).toEqual('/app/scopedApp/subpath'); }); - it('calls legacy mount handler', async () => { - await navigate('/app/legacyApp1'); - expect(mounters.get('legacyApp1')!.mounter.mount.mock.calls[0][0]).toMatchObject({ - appBasePath: '/app/legacyApp1', - element: expect.any(HTMLDivElement), - onAppLeave: expect.any(Function), - history: expect.any(ScopedHistory), - }); - }); - - it('handles legacy apps with subapps', async () => { - await navigate('/app/baseApp'); - expect(mounters.get('baseApp:legacyApp2')!.mounter.mount.mock.calls[0][0]).toMatchObject({ - appBasePath: '/app/baseApp', - element: expect.any(HTMLDivElement), - onAppLeave: expect.any(Function), - history: expect.any(ScopedHistory), - }); - }); - it('displays error page if no app is found', async () => { const dom = await navigate('/app/unknown'); @@ -414,10 +391,4 @@ describe('AppRouter', () => { expect(dom?.exists(AppNotFound)).toBe(true); }); - - it('displays error page if legacy app is inaccessible', async () => { - const dom = await navigate('/app/disabledLegacyApp'); - - expect(dom?.exists(AppNotFound)).toBe(true); - }); }); diff --git a/src/core/public/application/integration_tests/utils.tsx b/src/core/public/application/integration_tests/utils.tsx index 80a7fc2c2cad6..2ed9e0c495fb9 100644 --- a/src/core/public/application/integration_tests/utils.tsx +++ b/src/core/public/application/integration_tests/utils.tsx @@ -20,11 +20,10 @@ import React, { ReactElement } from 'react'; import { act } from 'react-dom/test-utils'; import { mount } from 'enzyme'; - import { I18nProvider } from '@kbn/i18n/react'; -import { App, LegacyApp, AppMountParameters } from '../types'; -import { EitherApp, MockedMounter, MockedMounterTuple, Mountable } from '../test_types'; +import { AppMountParameters } from '../types'; +import { MockedMounterTuple, Mountable } from '../test_types'; type Dom = ReturnType | null; type Renderer = () => Dom | Promise; @@ -55,7 +54,7 @@ export const createAppMounter = ({ appRoute?: string; exactRoute?: boolean; extraMountHook?: (params: AppMountParameters) => void; -}): MockedMounterTuple => { +}): MockedMounterTuple => { const unmount = jest.fn(); return [ appId, @@ -63,7 +62,6 @@ export const createAppMounter = ({ mounter: { appRoute, appBasePath: appRoute, - legacy: false, exactRoute, mount: jest.fn(async (params: AppMountParameters) => { const { appBasePath: basename, element } = params; @@ -82,24 +80,6 @@ export const createAppMounter = ({ ]; }; -export const createLegacyAppMounter = ( - appId: string, - legacyMount: MockedMounter['mount'] -): MockedMounterTuple => [ - appId, - { - mounter: { - appRoute: `/app/${appId.split(':')[0]}`, - appBasePath: `/app/${appId.split(':')[0]}`, - unmountBeforeMounting: true, - legacy: true, - exactRoute: false, - mount: legacyMount, - }, - unmount: jest.fn(), - }, -]; - -export function getUnmounter(app: Mountable) { +export function getUnmounter(app: Mountable) { return app.mounter.mount.mock.results[0].value; } diff --git a/src/core/public/application/test_types.ts b/src/core/public/application/test_types.ts index b822597e510cb..64012f0c0b6c1 100644 --- a/src/core/public/application/test_types.ts +++ b/src/core/public/application/test_types.ts @@ -17,28 +17,26 @@ * under the License. 
*/ -import { App, LegacyApp, Mounter, AppUnmount } from './types'; +import { AppUnmount, Mounter } from './types'; import { ApplicationService } from './application_service'; /** @internal */ export type ApplicationServiceContract = PublicMethodsOf; /** @internal */ -export type EitherApp = App | LegacyApp; -/** @internal */ export type MockedUnmount = jest.Mocked; /** @internal */ -export interface Mountable { - mounter: MockedMounter; +export interface Mountable { + mounter: MockedMounter; unmount: MockedUnmount; } /** @internal */ -export type MockedMounter = jest.Mocked>>; +export type MockedMounter = jest.Mocked; /** @internal */ -export type MockedMounterTuple = [string, Mountable]; +export type MockedMounterTuple = [string, Mountable]; /** @internal */ -export type MockedMounterMap = Map>; +export type MockedMounterMap = Map; /** @internal */ export type MockLifecycle< T extends keyof ApplicationService, diff --git a/src/core/public/application/types.ts b/src/core/public/application/types.ts index 0fe97431b1569..df83b6e932aad 100644 --- a/src/core/public/application/types.ts +++ b/src/core/public/application/types.ts @@ -21,6 +21,7 @@ import { Observable } from 'rxjs'; import { History } from 'history'; import { RecursiveReadonly } from '@kbn/utility-types'; +import { MountPoint } from '../types'; import { Capabilities } from './capabilities'; import { ChromeStart } from '../chrome'; import { IContextProvider } from '../context'; @@ -35,8 +36,64 @@ import { SavedObjectsStart } from '../saved_objects'; import { AppCategory } from '../../types'; import { ScopedHistory } from './scoped_history'; -/** @public */ -export interface AppBase { +/** + * Accessibility status of an application. + * + * @public + */ +export enum AppStatus { + /** + * Application is accessible. + */ + accessible = 0, + /** + * Application is not accessible. + */ + inaccessible = 1, +} + +/** + * Status of the application's navLink. + * + * @public + */ +export enum AppNavLinkStatus { + /** + * The application navLink will be `visible` if the application's {@link AppStatus} is set to `accessible` + * and `hidden` if the application status is set to `inaccessible`. + */ + default = 0, + /** + * The application navLink is visible and clickable in the navigation bar. + */ + visible = 1, + /** + * The application navLink is visible but inactive and not clickable in the navigation bar. + */ + disabled = 2, + /** + * The application navLink does not appear in the navigation bar. + */ + hidden = 3, +} + +/** + * Defines the list of fields that can be updated via an {@link AppUpdater}. + * @public + */ +export type AppUpdatableFields = Pick; + +/** + * Updater for applications. + * see {@link ApplicationSetup} + * @public + */ +export type AppUpdater = (app: App) => Partial | undefined; + +/** + * @public + */ +export interface App { /** * The unique identifier of the application */ @@ -135,83 +192,12 @@ export interface AppBase { */ capabilities?: Partial; - /** - * Flag to keep track of legacy applications. - * For internal use only. any value will be overridden when registering an App. - * - * @internal - */ - legacy?: boolean; - /** * Hide the UI chrome when the application is mounted. Defaults to `false`. * Takes precedence over chrome service visibility settings. */ chromeless?: boolean; -} -/** - * Accessibility status of an application. - * - * @public - */ -export enum AppStatus { - /** - * Application is accessible. - */ - accessible = 0, - /** - * Application is not accessible. 
- */ - inaccessible = 1, -} - -/** - * Status of the application's navLink. - * - * @public - */ -export enum AppNavLinkStatus { - /** - * The application navLink will be `visible` if the application's {@link AppStatus} is set to `accessible` - * and `hidden` if the application status is set to `inaccessible`. - */ - default = 0, - /** - * The application navLink is visible and clickable in the navigation bar. - */ - visible = 1, - /** - * The application navLink is visible but inactive and not clickable in the navigation bar. - */ - disabled = 2, - /** - * The application navLink does not appear in the navigation bar. - */ - hidden = 3, -} - -/** - * Defines the list of fields that can be updated via an {@link AppUpdater}. - * @public - */ -export type AppUpdatableFields = Pick< - AppBase, - 'status' | 'navLinkStatus' | 'tooltip' | 'defaultPath' ->; - -/** - * Updater for applications. - * see {@link ApplicationSetup} - * @public - */ -export type AppUpdater = (app: AppBase) => Partial | undefined; - -/** - * Extension of {@link AppBase | common app properties} with the mount function. - * @public - */ -export interface App extends AppBase { /** * A mount function called when the user navigates to this app's route. May have signature of {@link AppMount} or * {@link AppMountDeprecated}. @@ -222,12 +208,6 @@ export interface App extends AppBase { */ mount: AppMount | AppMountDeprecated; - /** - * Hide the UI chrome when the application is mounted. Defaults to `false`. - * Takes precedence over chrome service visibility settings. - */ - chromeless?: boolean; - /** * Override the application's routing path from `/app/${id}`. * Must be unique across registered applications. Should not include the @@ -254,39 +234,18 @@ export interface App extends AppBase { exactRoute?: boolean; } -/** @public */ -export interface LegacyApp extends AppBase { - appUrl: string; - subUrlBase?: string; - linkToLastSubUrl?: boolean; - disableSubUrlTracking?: boolean; -} - /** * Public information about a registered {@link App | application} * * @public */ export type PublicAppInfo = Omit & { - legacy: false; // remove optional on fields populated with default values status: AppStatus; navLinkStatus: AppNavLinkStatus; appRoute: string; }; -/** - * Information about a registered {@link LegacyApp | legacy application} - * - * @public - */ -export type PublicLegacyAppInfo = Omit & { - legacy: true; - // remove optional on fields populated with default values - status: AppStatus; - navLinkStatus: AppNavLinkStatus; -}; - /** * A mount function called when the user navigates to this app's route. * @@ -299,6 +258,12 @@ export type AppMount = ( params: AppMountParameters ) => AppUnmount | Promise; +/** + * A function called when an application should be unmounted from the page. This function should be synchronous. + * @public + */ +export type AppUnmount = () => void; + /** * A mount function called when the user navigates to this app's route. * @@ -495,6 +460,37 @@ export interface AppMountParameters { * ``` */ onAppLeave: (handler: AppLeaveHandler) => void; + + /** + * A function that can be used to set the mount point used to populate the application action container + * in the chrome header. + * + * Calling the handler multiple time will erase the current content of the action menu with the mount from the latest call. + * Calling the handler with `undefined` will unmount the current mount point. + * Calling the handler after the application has been unmounted will have no effect. 
+ * + * @example + * + * ```ts + * // application.tsx + * import React from 'react'; + * import ReactDOM from 'react-dom'; + * import { BrowserRouter, Route } from 'react-router-dom'; + * + * import { CoreStart, AppMountParameters } from 'src/core/public'; + * import { MyPluginDepsStart } from './plugin'; + * + * export renderApp = ({ element, history, setHeaderActionMenu }: AppMountParameters) => { + * const { renderApp } = await import('./application'); + * const { renderActionMenu } = await import('./action_menu'); + * setHeaderActionMenu((element) => { + * return renderActionMenu(element); + * }) + * return renderApp({ element, history }); + * } + * ``` + */ + setHeaderActionMenu: (menuMount: MountPoint | undefined) => void; } /** @@ -575,30 +571,14 @@ export interface AppLeaveActionFactory { default(): AppLeaveDefaultAction; } -/** - * A function called when an application should be unmounted from the page. This function should be synchronous. - * @public - */ -export type AppUnmount = () => void; - -/** @internal */ -export type AppMounter = (params: AppMountParameters) => Promise; - -/** @internal */ -export type LegacyAppMounter = (params: AppMountParameters) => void; - /** @internal */ -export type Mounter = SelectivePartial< - { - appRoute: string; - appBasePath: string; - mount: T extends LegacyApp ? LegacyAppMounter : AppMounter; - legacy: boolean; - exactRoute: boolean; - unmountBeforeMounting: T extends LegacyApp ? true : boolean; - }, - T extends LegacyApp ? never : 'unmountBeforeMounting' ->; +export interface Mounter { + appRoute: string; + appBasePath: string; + mount: AppMount; + exactRoute: boolean; + unmountBeforeMounting?: boolean; +} /** @internal */ export interface ParsedAppUrl { @@ -670,13 +650,6 @@ export interface InternalApplicationSetup extends Pick ): void; - /** - * Register metadata about legacy applications. Legacy apps will not be mounted when navigated to. - * @param app - * @internal - */ - registerLegacyApp(app: LegacyApp): void; - /** * Register a context provider for application mounting. Will only be available to applications that depend on the * plugin that registered this context. Deprecated, use {@link CoreSetup.getStartServices}. @@ -699,7 +672,7 @@ export interface InternalApplicationSetup extends Pick>; + applications$: Observable>; /** * Navigate to a given app @@ -821,14 +791,16 @@ export interface InternalApplicationStart extends Omit | undefined; -} + currentActionMenu$: Observable; -/** @internal */ -type SelectivePartial = Partial> & - Required>> extends infer U - ? { [P in keyof U]: U[P] } - : never; + /** + * The global history instance, exposed only to Core. 
+ * @internal + */ + history: History; +} diff --git a/src/core/public/application/ui/app_container.test.tsx b/src/core/public/application/ui/app_container.test.tsx index a94313dd53abb..f6cde54e6f502 100644 --- a/src/core/public/application/ui/app_container.test.tsx +++ b/src/core/public/application/ui/app_container.test.tsx @@ -29,6 +29,7 @@ import { ScopedHistory } from '../scoped_history'; describe('AppContainer', () => { const appId = 'someApp'; const setAppLeaveHandler = jest.fn(); + const setAppActionMenu = jest.fn(); const setIsMounting = jest.fn(); beforeEach(() => { @@ -54,7 +55,6 @@ describe('AppContainer', () => { appBasePath: '/base-path', appRoute: '/some-route', unmountBeforeMounting: false, - legacy: false, exactRoute: false, mount: async ({ element }: AppMountParameters) => { await promise; @@ -76,6 +76,7 @@ describe('AppContainer', () => { appStatus={AppStatus.inaccessible} mounter={mounter} setAppLeaveHandler={setAppLeaveHandler} + setAppActionMenu={setAppActionMenu} setIsMounting={setIsMounting} createScopedHistory={(appPath: string) => // Create a history using the appPath as the current location @@ -116,6 +117,7 @@ describe('AppContainer', () => { appStatus={AppStatus.accessible} mounter={mounter} setAppLeaveHandler={setAppLeaveHandler} + setAppActionMenu={setAppActionMenu} setIsMounting={setIsMounting} createScopedHistory={(appPath: string) => // Create a history using the appPath as the current location @@ -143,7 +145,6 @@ describe('AppContainer', () => { appBasePath: '/base-path/some-route', appRoute: '/some-route', unmountBeforeMounting: false, - legacy: false, exactRoute: false, mount: async ({ element }: AppMountParameters) => { await waitPromise; @@ -158,6 +159,7 @@ describe('AppContainer', () => { appStatus={AppStatus.accessible} mounter={mounter} setAppLeaveHandler={setAppLeaveHandler} + setAppActionMenu={setAppActionMenu} setIsMounting={setIsMounting} createScopedHistory={(appPath: string) => // Create a history using the appPath as the current location diff --git a/src/core/public/application/ui/app_container.tsx b/src/core/public/application/ui/app_container.tsx index 332c31c64b6ba..f668cf851da55 100644 --- a/src/core/public/application/ui/app_container.tsx +++ b/src/core/public/application/ui/app_container.tsx @@ -25,8 +25,9 @@ import React, { useState, MutableRefObject, } from 'react'; - import { EuiLoadingSpinner } from '@elastic/eui'; + +import type { MountPoint } from '../../types'; import { AppLeaveHandler, AppStatus, AppUnmount, Mounter } from '../types'; import { AppNotFound } from './app_not_found_screen'; import { ScopedHistory } from '../scoped_history'; @@ -39,6 +40,7 @@ interface Props { mounter?: Mounter; appStatus: AppStatus; setAppLeaveHandler: (appId: string, handler: AppLeaveHandler) => void; + setAppActionMenu: (appId: string, mount: MountPoint | undefined) => void; createScopedHistory: (appUrl: string) => ScopedHistory; setIsMounting: (isMounting: boolean) => void; } @@ -48,6 +50,7 @@ export const AppContainer: FunctionComponent = ({ appId, appPath, setAppLeaveHandler, + setAppActionMenu, createScopedHistory, appStatus, setIsMounting, @@ -84,6 +87,7 @@ export const AppContainer: FunctionComponent = ({ history: createScopedHistory(appPath), element: elementRef.current!, onAppLeave: (handler) => setAppLeaveHandler(appId, handler), + setHeaderActionMenu: (menuMount) => setAppActionMenu(appId, menuMount), })) || null; } catch (e) { // TODO: add error UI @@ -98,7 +102,16 @@ export const AppContainer: FunctionComponent = ({ mount(); return 
unmount; - }, [appId, appStatus, mounter, createScopedHistory, setAppLeaveHandler, appPath, setIsMounting]); + }, [ + appId, + appStatus, + mounter, + createScopedHistory, + setAppLeaveHandler, + setAppActionMenu, + appPath, + setIsMounting, + ]); return ( diff --git a/src/core/public/application/ui/app_router.tsx b/src/core/public/application/ui/app_router.tsx index f1f22237c32db..42bc9a53aee2d 100644 --- a/src/core/public/application/ui/app_router.tsx +++ b/src/core/public/application/ui/app_router.tsx @@ -23,6 +23,7 @@ import { History } from 'history'; import { Observable } from 'rxjs'; import useObservable from 'react-use/lib/useObservable'; +import type { MountPoint } from '../../types'; import { AppLeaveHandler, AppStatus, Mounter } from '../types'; import { AppContainer } from './app_container'; import { ScopedHistory } from '../scoped_history'; @@ -32,6 +33,7 @@ interface Props { history: History; appStatuses$: Observable>; setAppLeaveHandler: (appId: string, handler: AppLeaveHandler) => void; + setAppActionMenu: (appId: string, mount: MountPoint | undefined) => void; setIsMounting: (isMounting: boolean) => void; } @@ -43,6 +45,7 @@ export const AppRouter: FunctionComponent = ({ history, mounters, setAppLeaveHandler, + setAppActionMenu, appStatuses$, setIsMounting, }) => { @@ -55,25 +58,21 @@ export const AppRouter: FunctionComponent = ({ return ( - {[...mounters] - // legacy apps can have multiple sub-apps registered with the same route - // which needs additional logic that is handled in the catch-all route below - .filter(([_, mounter]) => !mounter.legacy) - .map(([appId, mounter]) => ( - ( - - )} - /> - ))} + {[...mounters].map(([appId, mounter]) => ( + ( + + )} + /> + ))} {/* handler for legacy apps and used as a catch-all to display 404 page on not existing /app/appId apps*/} = ({ url, }, }: RouteComponentProps) => { - // Find the mounter including legacy mounters with subapps: - const [id, mounter] = mounters.has(appId) - ? [appId, mounters.get(appId)] - : [...mounters].filter(([key]) => key.split(':')[0] === appId)[0] ?? []; - + // the id/mounter retrieval can be removed once #76348 is addressed + const [id, mounter] = mounters.has(appId) ? 
[appId, mounters.get(appId)] : []; return ( ); }} diff --git a/src/core/public/application/utils.test.ts b/src/core/public/application/utils.test.ts index 4663ca2db21e7..ee1d82a7a872e 100644 --- a/src/core/public/application/utils.test.ts +++ b/src/core/public/application/utils.test.ts @@ -18,16 +18,9 @@ */ import { of } from 'rxjs'; -import { App, AppNavLinkStatus, AppStatus, LegacyApp } from './types'; +import { App, AppNavLinkStatus, AppStatus } from './types'; import { BasePath } from '../http/base_path'; -import { - appendAppPath, - getAppInfo, - isLegacyApp, - parseAppUrl, - relativeToAbsolute, - removeSlashes, -} from './utils'; +import { appendAppPath, getAppInfo, parseAppUrl, relativeToAbsolute, removeSlashes } from './utils'; describe('removeSlashes', () => { it('only removes duplicates by default', () => { @@ -70,9 +63,11 @@ describe('appendAppPath', () => { expect(appendAppPath('/app/my-app', '')).toEqual('/app/my-app'); }); - it('preserves the trailing slash only if included in the hash', () => { + it('preserves the trailing slash only if included in the hash or appPath', () => { expect(appendAppPath('/app/my-app', '/some-path/')).toEqual('/app/my-app/some-path'); expect(appendAppPath('/app/my-app', '/some-path#/')).toEqual('/app/my-app/some-path#/'); + expect(appendAppPath('/app/my-app#/', '')).toEqual('/app/my-app#/'); + expect(appendAppPath('/app/my-app#', '/')).toEqual('/app/my-app#/'); expect(appendAppPath('/app/my-app', '/some-path#/hash/')).toEqual( '/app/my-app/some-path#/hash/' ); @@ -80,29 +75,6 @@ describe('appendAppPath', () => { }); }); -describe('isLegacyApp', () => { - it('returns true for legacy apps', () => { - expect( - isLegacyApp({ - id: 'legacy', - title: 'Legacy App', - appUrl: '/some-url', - legacy: true, - }) - ).toEqual(true); - }); - it('returns false for non-legacy apps', () => { - expect( - isLegacyApp({ - id: 'legacy', - title: 'Legacy App', - mount: () => () => undefined, - legacy: false, - }) - ).toEqual(false); - }); -}); - describe('relativeToAbsolute', () => { it('converts a relative path to an absolute url', () => { const origin = window.location.origin; @@ -113,7 +85,7 @@ describe('relativeToAbsolute', () => { }); describe('parseAppUrl', () => { - let apps: Map | LegacyApp>; + let apps: Map>; let basePath: BasePath; const getOrigin = () => 'https://kibana.local:8080'; @@ -124,19 +96,6 @@ describe('parseAppUrl', () => { title: 'some-title', mount: () => () => undefined, ...props, - legacy: false, - }; - apps.set(app.id, app); - return app; - }; - - const createLegacyApp = (props: Partial): LegacyApp => { - const app: LegacyApp = { - id: 'some-id', - title: 'some-title', - appUrl: '/my-url', - ...props, - legacy: true, }; apps.set(app.id, app); return app; @@ -153,10 +112,6 @@ describe('parseAppUrl', () => { id: 'bar', appRoute: '/custom-bar', }); - createLegacyApp({ - id: 'legacy', - appUrl: '/app/legacy', - }); }); describe('with relative paths', () => { @@ -236,18 +191,6 @@ describe('parseAppUrl', () => { path: '/path#hash/bang?hello=dolly', }); }); - it('works with legacy apps', () => { - expect(parseAppUrl('/base-path/app/legacy', basePath, apps, getOrigin)).toEqual({ - app: 'legacy', - path: undefined, - }); - expect( - parseAppUrl('/base-path/app/legacy/path#hash?query=bar', basePath, apps, getOrigin) - ).toEqual({ - app: 'legacy', - path: '/path#hash?query=bar', - }); - }); it('returns undefined when the app is not known', () => { expect(parseAppUrl('/base-path/app/non-registered', basePath, apps, getOrigin)).toEqual( undefined @@ 
-409,25 +352,6 @@ describe('parseAppUrl', () => { path: '/path#hash/bang?hello=dolly', }); }); - it('works with legacy apps', () => { - expect( - parseAppUrl('https://kibana.local:8080/base-path/app/legacy', basePath, apps, getOrigin) - ).toEqual({ - app: 'legacy', - path: undefined, - }); - expect( - parseAppUrl( - 'https://kibana.local:8080/base-path/app/legacy/path#hash?query=bar', - basePath, - apps, - getOrigin - ) - ).toEqual({ - app: 'legacy', - path: '/path#hash?query=bar', - }); - }); it('returns undefined when the app is not known', () => { expect( parseAppUrl( @@ -471,18 +395,6 @@ describe('getAppInfo', () => { status: AppStatus.accessible, navLinkStatus: AppNavLinkStatus.default, appRoute: `/app/some-id`, - legacy: false, - ...props, - }); - - const createLegacyApp = (props: Partial = {}): LegacyApp => ({ - appUrl: '/my-app-url', - updater$: of(() => undefined), - id: 'some-id', - title: 'some-title', - status: AppStatus.accessible, - navLinkStatus: AppNavLinkStatus.default, - legacy: true, ...props, }); @@ -496,21 +408,6 @@ describe('getAppInfo', () => { status: AppStatus.accessible, navLinkStatus: AppNavLinkStatus.visible, appRoute: `/app/some-id`, - legacy: false, - }); - }); - - it('converts a legacy application and remove sensitive properties', () => { - const app = createLegacyApp(); - const info = getAppInfo(app); - - expect(info).toEqual({ - appUrl: '/my-app-url', - id: 'some-id', - title: 'some-title', - status: AppStatus.accessible, - navLinkStatus: AppNavLinkStatus.visible, - legacy: true, }); }); diff --git a/src/core/public/application/utils.ts b/src/core/public/application/utils.ts index c5ed7b659f3ae..85760526bf544 100644 --- a/src/core/public/application/utils.ts +++ b/src/core/public/application/utils.ts @@ -18,15 +18,7 @@ */ import { IBasePath } from '../http'; -import { - App, - AppNavLinkStatus, - AppStatus, - LegacyApp, - ParsedAppUrl, - PublicAppInfo, - PublicLegacyAppInfo, -} from './types'; +import { App, AppNavLinkStatus, AppStatus, ParsedAppUrl, PublicAppInfo } from './types'; /** * Utility to remove trailing, leading or duplicate slashes. @@ -55,8 +47,8 @@ export const removeSlashes = ( export const appendAppPath = (appBasePath: string, path: string = '') => { // Only prepend slash if not a hash or query path path = path === '' || path.startsWith('#') || path.startsWith('?') ? path : `/${path}`; - // Do not remove trailing slash when in hashbang - const removeTrailing = path.indexOf('#') === -1; + // Do not remove trailing slash when in hashbang or basePath + const removeTrailing = path.indexOf('#') === -1 && appBasePath.indexOf('#') === -1; return removeSlashes(`${appBasePath}${path}`, { trailing: removeTrailing, duplicates: true, @@ -64,10 +56,6 @@ export const appendAppPath = (appBasePath: string, path: string = '') => { }); }; -export function isLegacyApp(app: App | LegacyApp): app is LegacyApp { - return app.legacy === true; -} - /** * Converts a relative path to an absolute url. * Implementation is based on a specified behavior of the browser to automatically convert @@ -95,7 +83,7 @@ export const relativeToAbsolute = (url: string): string => { export const parseAppUrl = ( url: string, basePath: IBasePath, - apps: Map | LegacyApp>, + apps: Map>, getOrigin: () => string = () => window.location.origin ): ParsedAppUrl | undefined => { url = removeBasePath(url, basePath, getOrigin()); @@ -104,7 +92,7 @@ export const parseAppUrl = ( } for (const app of apps.values()) { - const appPath = isLegacyApp(app) ? 
app.appUrl : app.appRoute || `/app/${app.id}`; + const appPath = app.appRoute || `/app/${app.id}`; if (url.startsWith(appPath)) { const path = url.substr(appPath.length); @@ -123,29 +111,18 @@ const removeBasePath = (url: string, basePath: IBasePath, origin: string): strin return basePath.remove(url); }; -export function getAppInfo(app: App | LegacyApp): PublicAppInfo | PublicLegacyAppInfo { +export function getAppInfo(app: App): PublicAppInfo { const navLinkStatus = app.navLinkStatus === AppNavLinkStatus.default ? app.status === AppStatus.inaccessible ? AppNavLinkStatus.hidden : AppNavLinkStatus.visible : app.navLinkStatus!; - if (isLegacyApp(app)) { - const { updater$, ...infos } = app; - return { - ...infos, - status: app.status!, - navLinkStatus, - legacy: true, - }; - } else { - const { updater$, mount, ...infos } = app; - return { - ...infos, - status: app.status!, - navLinkStatus, - appRoute: app.appRoute!, - legacy: false, - }; - } + const { updater$, mount, ...infos } = app; + return { + ...infos, + status: app.status!, + navLinkStatus, + appRoute: app.appRoute!, + }; } diff --git a/src/core/public/chrome/chrome_service.mock.ts b/src/core/public/chrome/chrome_service.mock.ts index c9a05ff4e08fe..5862ee7175f71 100644 --- a/src/core/public/chrome/chrome_service.mock.ts +++ b/src/core/public/chrome/chrome_service.mock.ts @@ -47,9 +47,6 @@ const createStartContractMock = () => { docTitle: { change: jest.fn(), reset: jest.fn(), - __legacy: { - setBaseTitle: jest.fn(), - }, }, navControls: { registerLeft: jest.fn(), diff --git a/src/core/public/chrome/chrome_service.tsx b/src/core/public/chrome/chrome_service.tsx index ef9a682d609ec..b96c34cd9fbe8 100644 --- a/src/core/public/chrome/chrome_service.tsx +++ b/src/core/public/chrome/chrome_service.tsx @@ -238,7 +238,6 @@ export class ChromeService { homeHref={http.basePath.prepend('/app/home')} isVisible$={this.isVisible$} kibanaVersion={injectedMetadata.getKibanaVersion()} - legacyMode={injectedMetadata.getLegacyMode()} navLinks$={navLinks.getNavLinks$()} recentlyAccessed$={recentlyAccessed.get$()} navControlsLeft$={navControls.getLeft$()} diff --git a/src/core/public/chrome/doc_title/doc_title_service.test.ts b/src/core/public/chrome/doc_title/doc_title_service.test.ts index 763e8c9ebd74a..953baf7e7d1d5 100644 --- a/src/core/public/chrome/doc_title/doc_title_service.test.ts +++ b/src/core/public/chrome/doc_title/doc_title_service.test.ts @@ -64,17 +64,4 @@ describe('DocTitleService', () => { expect(document.title).toEqual('InitialTitle'); }); }); - - describe('#__legacy.setBaseTitle()', () => { - it('allows to change the baseTitle after startup', async () => { - const start = getStart('InitialTitle'); - start.change('WithInitial'); - expect(document.title).toEqual('WithInitial - InitialTitle'); - start.__legacy.setBaseTitle('NewBaseTitle'); - start.change('WithNew'); - expect(document.title).toEqual('WithNew - NewBaseTitle'); - start.reset(); - expect(document.title).toEqual('NewBaseTitle'); - }); - }); }); diff --git a/src/core/public/chrome/doc_title/doc_title_service.ts b/src/core/public/chrome/doc_title/doc_title_service.ts index c6e9ec7a40b77..817a460acaf3f 100644 --- a/src/core/public/chrome/doc_title/doc_title_service.ts +++ b/src/core/public/chrome/doc_title/doc_title_service.ts @@ -59,11 +59,6 @@ export interface ChromeDocTitle { * (meaning the one present in the title meta at application load.) 
*/ reset(): void; - - /** @internal */ - __legacy: { - setBaseTitle(baseTitle: string): void; - }; } const defaultTitle: string[] = []; @@ -85,11 +80,6 @@ export class DocTitleService { reset: () => { this.applyTitle(defaultTitle); }, - __legacy: { - setBaseTitle: (baseTitle) => { - this.baseTitle = baseTitle; - }, - }, }; } diff --git a/src/core/public/chrome/nav_links/nav_link.ts b/src/core/public/chrome/nav_links/nav_link.ts index 55b5c80526bab..4b82e0ced4505 100644 --- a/src/core/public/chrome/nav_links/nav_link.ts +++ b/src/core/public/chrome/nav_links/nav_link.ts @@ -74,54 +74,8 @@ export interface ChromeNavLink { /** * Settled state between `url`, `baseUrl`, and `active` - * - * @internalRemarks - * This should be required once legacy apps are gone. - */ - readonly href?: string; - - /** LEGACY FIELDS */ - - /** - * A url base that legacy apps can set to match deep URLs to an application. - * - * @internalRemarks - * This should be removed once legacy apps are gone. - * - * @deprecated */ - readonly subUrlBase?: string; - - /** - * A flag that tells legacy chrome to ignore the link when - * tracking sub-urls - * - * @internalRemarks - * This should be removed once legacy apps are gone. - * - * @deprecated - */ - readonly disableSubUrlTracking?: boolean; - - /** - * Whether or not the subUrl feature should be enabled. - * - * @internalRemarks - * Only read by legacy platform. - * - * @deprecated - */ - readonly linkToLastSubUrl?: boolean; - - /** - * Indicates whether or not this app is currently on the screen. - * - * @internalRemarks - * Remove this when ApplicationService is implemented and managing apps. - * - * @deprecated - */ - readonly active?: boolean; + readonly href: string; /** * Disables a link from being clickable. @@ -129,30 +83,18 @@ export interface ChromeNavLink { * @internalRemarks * This is only used by the ML and Graph plugins currently. They use this field * to disable the nav link when the license is expired. - * - * @deprecated */ readonly disabled?: boolean; /** * Hides a link from the navigation. - * - * @internalRemarks - * Remove this when ApplicationService is implemented. Instead, plugins should only - * register an Application if needed. 
*/ readonly hidden?: boolean; - - /** - * Used to separate links to legacy applications from NP applications - * @internal - */ - readonly legacy: boolean; } /** @public */ export type ChromeNavLinkUpdateableFields = Partial< - Pick + Pick >; export class NavLinkWrapper { @@ -170,7 +112,7 @@ export class NavLinkWrapper { public update(newProps: ChromeNavLinkUpdateableFields) { // Enforce limited properties at runtime for JS code - newProps = pick(newProps, ['active', 'disabled', 'hidden', 'url', 'subUrlBase', 'href']); + newProps = pick(newProps, ['disabled', 'hidden', 'url', 'href']); return new NavLinkWrapper({ ...this.properties, ...newProps }); } } diff --git a/src/core/public/chrome/nav_links/nav_links_service.test.ts b/src/core/public/chrome/nav_links/nav_links_service.test.ts index 8f610e238b0fd..a8413ed5b546a 100644 --- a/src/core/public/chrome/nav_links/nav_links_service.test.ts +++ b/src/core/public/chrome/nav_links/nav_links_service.test.ts @@ -19,7 +19,7 @@ import { NavLinksService } from './nav_links_service'; import { take, map, takeLast } from 'rxjs/operators'; -import { App, LegacyApp } from '../../application'; +import { App } from '../../application'; import { BehaviorSubject } from 'rxjs'; const availableApps = new Map([ @@ -34,32 +34,6 @@ const availableApps = new Map([ }, ], ['chromelessApp', { id: 'chromelessApp', order: 20, title: 'Chromless App', chromeless: true }], - [ - 'legacyApp1', - { - id: 'legacyApp1', - order: 5, - title: 'Legacy App 1', - icon: 'legacyApp1', - appUrl: '/app1', - legacy: true, - }, - ], - [ - 'legacyApp2', - { - id: 'legacyApp2', - order: -10, - title: 'Legacy App 2', - euiIconType: 'canvasApp', - appUrl: '/app2', - legacy: true, - }, - ], - [ - 'legacyApp3', - { id: 'legacyApp3', order: 20, title: 'Legacy App 3', appUrl: '/app3', legacy: true }, - ], ]); const mockHttp = { @@ -76,9 +50,7 @@ describe('NavLinksService', () => { beforeEach(() => { service = new NavLinksService(); mockAppService = { - applications$: new BehaviorSubject>( - availableApps as any - ), + applications$: new BehaviorSubject>(availableApps as any), }; start = service.start({ application: mockAppService, http: mockHttp }); }); @@ -105,19 +77,19 @@ describe('NavLinksService', () => { map((links) => links.map((l) => l.id)) ) .toPromise() - ).toEqual(['app2', 'legacyApp2', 'app1', 'legacyApp1', 'legacyApp3']); + ).toEqual(['app2', 'app1']); }); it('emits multiple values', async () => { const navLinkIds$ = start.getNavLinks$().pipe(map((links) => links.map((l) => l.id))); const emittedLinks: string[][] = []; navLinkIds$.subscribe((r) => emittedLinks.push(r)); - start.update('legacyApp1', { active: true }); + start.update('app1', { href: '/foo' }); service.stop(); expect(emittedLinks).toEqual([ - ['app2', 'legacyApp2', 'app1', 'legacyApp1', 'legacyApp3'], - ['app2', 'legacyApp2', 'app1', 'legacyApp1', 'legacyApp3'], + ['app2', 'app1'], + ['app2', 'app1'], ]); }); @@ -130,7 +102,7 @@ describe('NavLinksService', () => { describe('#get()', () => { it('returns link if exists', () => { - expect(start.get('legacyApp1')!.title).toEqual('Legacy App 1'); + expect(start.get('app2')!.title).toEqual('App 2'); }); it('returns undefined if it does not exist', () => { @@ -140,19 +112,13 @@ describe('NavLinksService', () => { describe('#getAll()', () => { it('returns a sorted array of navlinks', () => { - expect(start.getAll().map((l) => l.id)).toEqual([ - 'app2', - 'legacyApp2', - 'app1', - 'legacyApp1', - 'legacyApp3', - ]); + expect(start.getAll().map((l) => l.id)).toEqual(['app2', 
'app1']); }); }); describe('#has()', () => { it('returns true if exists', () => { - expect(start.has('legacyApp1')).toBe(true); + expect(start.has('app2')).toBe(true); }); it('returns false if it does not exist', () => { @@ -171,7 +137,7 @@ describe('NavLinksService', () => { map((links) => links.map((l) => l.id)) ) .toPromise() - ).toEqual(['app2', 'legacyApp2', 'app1', 'legacyApp1', 'legacyApp3']); + ).toEqual(['app2', 'app1']); }); it('does nothing on chromeless applications', async () => { @@ -184,11 +150,11 @@ describe('NavLinksService', () => { map((links) => links.map((l) => l.id)) ) .toPromise() - ).toEqual(['app2', 'legacyApp2', 'app1', 'legacyApp1', 'legacyApp3']); + ).toEqual(['app2', 'app1']); }); it('removes all other links', async () => { - start.showOnly('legacyApp1'); + start.showOnly('app2'); expect( await start .getNavLinks$() @@ -197,11 +163,11 @@ describe('NavLinksService', () => { map((links) => links.map((l) => l.id)) ) .toPromise() - ).toEqual(['legacyApp1']); + ).toEqual(['app2']); }); it('still removes all other links when availableApps are re-emitted', async () => { - start.showOnly('legacyApp2'); + start.showOnly('app2'); mockAppService.applications$.next(mockAppService.applications$.value); expect( await start @@ -211,22 +177,19 @@ describe('NavLinksService', () => { map((links) => links.map((l) => l.id)) ) .toPromise() - ).toEqual(['legacyApp2']); + ).toEqual(['app2']); }); }); describe('#update()', () => { it('updates the navlinks and returns the updated link', async () => { - expect(start.update('legacyApp1', { hidden: true })).toEqual( + expect(start.update('app2', { hidden: true })).toEqual( expect.objectContaining({ - appUrl: '/app1', - disabled: false, hidden: true, - icon: 'legacyApp1', - id: 'legacyApp1', - legacy: true, - order: 5, - title: 'Legacy App 1', + id: 'app2', + order: -10, + title: 'App 2', + euiIconType: 'canvasApp', }) ); const hiddenLinkIds = await start @@ -236,7 +199,7 @@ describe('NavLinksService', () => { map((links) => links.filter((l) => l.hidden).map((l) => l.id)) ) .toPromise(); - expect(hiddenLinkIds).toEqual(['legacyApp1']); + expect(hiddenLinkIds).toEqual(['app2']); }); it('returns undefined if link does not exist', () => { @@ -244,7 +207,7 @@ describe('NavLinksService', () => { }); it('keeps the updated link when availableApps are re-emitted', async () => { - start.update('legacyApp1', { hidden: true }); + start.update('app2', { hidden: true }); mockAppService.applications$.next(mockAppService.applications$.value); const hiddenLinkIds = await start .getNavLinks$() @@ -253,7 +216,7 @@ describe('NavLinksService', () => { map((links) => links.filter((l) => l.hidden).map((l) => l.id)) ) .toPromise(); - expect(hiddenLinkIds).toEqual(['legacyApp1']); + expect(hiddenLinkIds).toEqual(['app2']); }); }); diff --git a/src/core/public/chrome/nav_links/to_nav_link.test.ts b/src/core/public/chrome/nav_links/to_nav_link.test.ts index ba04dbed49cd4..7e2c1fc1f89f8 100644 --- a/src/core/public/chrome/nav_links/to_nav_link.test.ts +++ b/src/core/public/chrome/nav_links/to_nav_link.test.ts @@ -17,7 +17,7 @@ * under the License. 
*/ -import { PublicAppInfo, AppNavLinkStatus, AppStatus, PublicLegacyAppInfo } from '../../application'; +import { PublicAppInfo, AppNavLinkStatus, AppStatus } from '../../application'; import { toNavLink } from './to_nav_link'; import { httpServiceMock } from '../../mocks'; @@ -28,17 +28,6 @@ const app = (props: Partial = {}): PublicAppInfo => ({ status: AppStatus.accessible, navLinkStatus: AppNavLinkStatus.default, appRoute: `/app/some-id`, - legacy: false, - ...props, -}); - -const legacyApp = (props: Partial = {}): PublicLegacyAppInfo => ({ - appUrl: '/my-app-url', - id: 'some-id', - title: 'some-title', - status: AppStatus.accessible, - navLinkStatus: AppNavLinkStatus.default, - legacy: true, ...props, }); @@ -67,11 +56,6 @@ describe('toNavLink', () => { ); }); - it('flags legacy apps when converting to navLink', () => { - expect(toNavLink(app({}), basePath).properties.legacy).toEqual(false); - expect(toNavLink(legacyApp({}), basePath).properties.legacy).toEqual(true); - }); - it('handles applications with custom app route', () => { const link = toNavLink( app({ @@ -103,32 +87,6 @@ describe('toNavLink', () => { ); }); - it('does not generate `url` for legacy app', () => { - const link = toNavLink( - legacyApp({ - appUrl: '/my-legacy-app/#foo', - defaultPath: '/some/default/path', - }), - basePath - ); - expect(link.properties.url).toBeUndefined(); - }); - - it('uses appUrl when converting legacy applications', () => { - expect( - toNavLink( - legacyApp({ - appUrl: '/my-legacy-app/#foo', - }), - basePath - ).properties - ).toEqual( - expect.objectContaining({ - baseUrl: 'http://localhost/base-path/my-legacy-app/#foo', - }) - ); - }); - it('uses the application status when the navLinkStatus is set to default', () => { expect( toNavLink( diff --git a/src/core/public/chrome/nav_links/to_nav_link.ts b/src/core/public/chrome/nav_links/to_nav_link.ts index 2dedbfd5f36ac..703c1798b6fb8 100644 --- a/src/core/public/chrome/nav_links/to_nav_link.ts +++ b/src/core/public/chrome/nav_links/to_nav_link.ts @@ -17,19 +17,14 @@ * under the License. */ -import { PublicAppInfo, AppNavLinkStatus, AppStatus, PublicLegacyAppInfo } from '../../application'; +import { PublicAppInfo, AppNavLinkStatus, AppStatus } from '../../application'; import { IBasePath } from '../../http'; import { NavLinkWrapper } from './nav_link'; import { appendAppPath } from '../../application/utils'; -export function toNavLink( - app: PublicAppInfo | PublicLegacyAppInfo, - basePath: IBasePath -): NavLinkWrapper { +export function toNavLink(app: PublicAppInfo, basePath: IBasePath): NavLinkWrapper { const useAppStatus = app.navLinkStatus === AppNavLinkStatus.default; - const relativeBaseUrl = isLegacyApp(app) - ? basePath.prepend(app.appUrl) - : basePath.prepend(app.appRoute!); + const relativeBaseUrl = basePath.prepend(app.appRoute!); const url = relativeToAbsolute(appendAppPath(relativeBaseUrl, app.defaultPath)); const baseUrl = relativeToAbsolute(relativeBaseUrl); @@ -39,14 +34,9 @@ export function toNavLink( ? app.status === AppStatus.inaccessible : app.navLinkStatus === AppNavLinkStatus.hidden, disabled: useAppStatus ? false : app.navLinkStatus === AppNavLinkStatus.disabled, - legacy: isLegacyApp(app), baseUrl, - ...(isLegacyApp(app) - ? 
{} - : { - href: url, - url, - }), + href: url, + url, }); } @@ -63,7 +53,3 @@ export function relativeToAbsolute(url: string) { a.setAttribute('href', url); return a.href; } - -function isLegacyApp(app: PublicAppInfo | PublicLegacyAppInfo): app is PublicLegacyAppInfo { - return app.legacy === true; -} diff --git a/src/core/public/chrome/recently_accessed/persisted_log.test.ts b/src/core/public/chrome/recently_accessed/persisted_log.test.ts index 4229efdf7ca9d..345ad8f3a1f5a 100644 --- a/src/core/public/chrome/recently_accessed/persisted_log.test.ts +++ b/src/core/public/chrome/recently_accessed/persisted_log.test.ts @@ -28,12 +28,6 @@ const createMockStorage = () => ({ length: 0, }); -jest.mock('ui/chrome', () => { - return { - getBasePath: () => `/some/base/path`, - }; -}); - const historyName = 'testHistory'; const historyLimit = 10; const payload = [ diff --git a/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap b/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap index fe959e570ab98..a770ece8496e4 100644 --- a/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap +++ b/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap @@ -71,7 +71,6 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` "href": "Custom link", "id": "Custom link", "isActive": true, - "legacy": false, "title": "Custom link", }, "closed": false, @@ -123,7 +122,6 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` id="collapsibe-nav" isLocked={false} isOpen={true} - legacyMode={false} navLinks$={ BehaviorSubject { "_isScalar": false, @@ -140,7 +138,6 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` "href": "discover", "id": "discover", "isActive": true, - "legacy": false, "title": "discover", }, Object { @@ -155,7 +152,6 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` "href": "siem", "id": "siem", "isActive": true, - "legacy": false, "title": "siem", }, Object { @@ -170,7 +166,6 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` "href": "metrics", "id": "metrics", "isActive": true, - "legacy": false, "title": "metrics", }, Object { @@ -184,7 +179,6 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` "href": "monitoring", "id": "monitoring", "isActive": true, - "legacy": false, "title": "monitoring", }, Object { @@ -199,7 +193,6 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` "href": "visualize", "id": "visualize", "isActive": true, - "legacy": false, "title": "visualize", }, Object { @@ -214,7 +207,6 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` "href": "dashboard", "id": "dashboard", "isActive": true, - "legacy": false, "title": "dashboard", }, Object { @@ -224,7 +216,6 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` "href": "canvas", "id": "canvas", "isActive": true, - "legacy": false, "title": "canvas", }, Object { @@ -239,7 +230,6 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` "href": "logs", "id": "logs", "isActive": true, - "legacy": false, "title": "logs", }, ], @@ -650,6 +640,8 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` data-test-subj="collapsibleNavGroup-recentlyViewed" id="mockId" initialIsOpen={true} + isLoading={false} + isLoadingMessage={false} onToggle={[Function]} paddingSize="none" > @@ -901,6 +893,8 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` 
data-test-subj="collapsibleNavGroup-kibana" id="mockId" initialIsOpen={true} + isLoading={false} + isLoadingMessage={false} onToggle={[Function]} paddingSize="none" > @@ -1188,6 +1182,8 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` data-test-subj="collapsibleNavGroup-observability" id="mockId" initialIsOpen={true} + isLoading={false} + isLoadingMessage={false} onToggle={[Function]} paddingSize="none" > @@ -1436,6 +1432,8 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` data-test-subj="collapsibleNavGroup-securitySolution" id="mockId" initialIsOpen={true} + isLoading={false} + isLoadingMessage={false} onToggle={[Function]} paddingSize="none" > @@ -1636,6 +1634,8 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` data-test-subj="collapsibleNavGroup-management" id="mockId" initialIsOpen={true} + isLoading={false} + isLoadingMessage={false} onToggle={[Function]} paddingSize="none" > @@ -2106,7 +2106,6 @@ exports[`CollapsibleNav renders the default nav 1`] = ` id="collapsibe-nav" isLocked={false} isOpen={false} - legacyMode={false} navLinks$={ BehaviorSubject { "_isScalar": false, @@ -2341,7 +2340,6 @@ exports[`CollapsibleNav renders the default nav 2`] = ` id="collapsibe-nav" isLocked={false} isOpen={true} - legacyMode={false} navLinks$={ BehaviorSubject { "_isScalar": false, @@ -2624,6 +2622,8 @@ exports[`CollapsibleNav renders the default nav 2`] = ` data-test-subj="collapsibleNavGroup-recentlyViewed" id="mockId" initialIsOpen={true} + isLoading={false} + isLoadingMessage={false} onToggle={[Function]} paddingSize="none" > @@ -3026,7 +3026,6 @@ exports[`CollapsibleNav renders the default nav 3`] = ` id="collapsibe-nav" isLocked={true} isOpen={true} - legacyMode={false} navLinks$={ BehaviorSubject { "_isScalar": false, @@ -3305,6 +3304,8 @@ exports[`CollapsibleNav renders the default nav 3`] = ` data-test-subj="collapsibleNavGroup-recentlyViewed" id="mockId" initialIsOpen={true} + isLoading={false} + isLoadingMessage={false} onToggle={[Function]} paddingSize="none" > diff --git a/src/core/public/chrome/ui/header/__snapshots__/header.test.tsx.snap b/src/core/public/chrome/ui/header/__snapshots__/header.test.tsx.snap index f101c30a1241b..128a0c5369e08 100644 --- a/src/core/public/chrome/ui/header/__snapshots__/header.test.tsx.snap +++ b/src/core/public/chrome/ui/header/__snapshots__/header.test.tsx.snap @@ -29,6 +29,15 @@ exports[`Header renders 1`] = ` "management": Object {}, "navLinks": Object {}, }, + "currentActionMenu$": BehaviorSubject { + "_isScalar": false, + "_value": undefined, + "closed": false, + "hasError": false, + "isStopped": false, + "observers": Array [], + "thrownError": null, + }, "currentAppId$": Observable { "_isScalar": false, "source": Subject { @@ -106,8 +115,8 @@ exports[`Header renders 1`] = ` "_isScalar": false, "_value": Object { "baseUrl": "", + "href": "", "id": "cloud-deployment-link", - "legacy": false, "title": "Manage cloud deployment", }, "closed": false, @@ -353,7 +362,6 @@ exports[`Header renders 1`] = ` } kibanaDocLink="/docs" kibanaVersion="1.0.0" - legacyMode={false} loadingCount$={ BehaviorSubject { "_isScalar": false, @@ -431,8 +439,8 @@ exports[`Header renders 1`] = ` "_value": Array [ Object { "baseUrl": "", + "href": "", "id": "kibana", - "legacy": false, "title": "kibana", }, ], @@ -641,6 +649,15 @@ exports[`Header renders 2`] = ` "management": Object {}, "navLinks": Object {}, }, + "currentActionMenu$": BehaviorSubject { + "_isScalar": false, + "_value": undefined, + "closed": false, + 
"hasError": false, + "isStopped": false, + "observers": Array [], + "thrownError": null, + }, "currentAppId$": Observable { "_isScalar": false, "source": Subject { @@ -832,8 +849,8 @@ exports[`Header renders 2`] = ` "_isScalar": false, "_value": Object { "baseUrl": "", + "href": "", "id": "cloud-deployment-link", - "legacy": false, "title": "Manage cloud deployment", }, "closed": false, @@ -1871,7 +1888,6 @@ exports[`Header renders 2`] = ` } kibanaDocLink="/docs" kibanaVersion="1.0.0" - legacyMode={false} loadingCount$={ BehaviorSubject { "_isScalar": false, @@ -2025,8 +2041,8 @@ exports[`Header renders 2`] = ` "_value": Array [ Object { "baseUrl": "", + "href": "", "id": "kibana", - "legacy": false, "title": "kibana", }, ], @@ -2397,8 +2413,8 @@ exports[`Header renders 2`] = ` "_value": Array [ Object { "baseUrl": "", + "href": "", "id": "kibana", - "legacy": false, "title": "kibana", }, ], @@ -4569,8 +4585,8 @@ exports[`Header renders 2`] = ` "_isScalar": false, "_value": Object { "baseUrl": "", + "href": "", "id": "cloud-deployment-link", - "legacy": false, "title": "Manage cloud deployment", }, "closed": false, @@ -4622,15 +4638,14 @@ exports[`Header renders 2`] = ` id="mockId" isLocked={false} isOpen={false} - legacyMode={false} navLinks$={ BehaviorSubject { "_isScalar": false, "_value": Array [ Object { "baseUrl": "", + "href": "", "id": "kibana", - "legacy": false, "title": "kibana", }, ], @@ -4854,6 +4869,15 @@ exports[`Header renders 3`] = ` "management": Object {}, "navLinks": Object {}, }, + "currentActionMenu$": BehaviorSubject { + "_isScalar": false, + "_value": undefined, + "closed": false, + "hasError": false, + "isStopped": false, + "observers": Array [], + "thrownError": null, + }, "currentAppId$": Observable { "_isScalar": false, "source": Subject { @@ -5045,8 +5069,8 @@ exports[`Header renders 3`] = ` "_isScalar": false, "_value": Object { "baseUrl": "", + "href": "", "id": "cloud-deployment-link", - "legacy": false, "title": "Manage cloud deployment", }, "closed": false, @@ -6084,7 +6108,6 @@ exports[`Header renders 3`] = ` } kibanaDocLink="/docs" kibanaVersion="1.0.0" - legacyMode={false} loadingCount$={ BehaviorSubject { "_isScalar": false, @@ -6238,8 +6261,8 @@ exports[`Header renders 3`] = ` "_value": Array [ Object { "baseUrl": "", + "href": "", "id": "kibana", - "legacy": false, "title": "kibana", }, ], @@ -6610,8 +6633,8 @@ exports[`Header renders 3`] = ` "_value": Array [ Object { "baseUrl": "", + "href": "", "id": "kibana", - "legacy": false, "title": "kibana", }, ], @@ -8782,8 +8805,8 @@ exports[`Header renders 3`] = ` "_isScalar": false, "_value": Object { "baseUrl": "", + "href": "", "id": "cloud-deployment-link", - "legacy": false, "title": "Manage cloud deployment", }, "closed": false, @@ -8835,15 +8858,14 @@ exports[`Header renders 3`] = ` id="mockId" isLocked={true} isOpen={false} - legacyMode={false} navLinks$={ BehaviorSubject { "_isScalar": false, "_value": Array [ Object { "baseUrl": "", + "href": "", "id": "kibana", - "legacy": false, "title": "kibana", }, ], @@ -9056,7 +9078,7 @@ exports[`Header renders 3`] = ` Array [ Object { "data-test-subj": "collapsibleNavCustomNavLink", - "href": undefined, + "href": "", "icon": undefined, "iconType": undefined, "isActive": false, @@ -9080,6 +9102,7 @@ exports[`Header renders 3`] = ` @@ -9451,6 +9476,7 @@ exports[`Header renders 3`] = ` @@ -14098,6 +14131,7 @@ exports[`Header renders 4`] = ` className="euiNavDrawerGroup__item" data-name="kibana" data-test-subj="navDrawerAppsMenuLink" + href="" icon={ ) { id: 
title, href: title, baseUrl: '/', - legacy: false, isActive: true, 'data-test-subj': title, }; @@ -62,7 +61,6 @@ function mockProps() { isLocked: false, isOpen: false, homeHref: '/', - legacyMode: false, navLinks$: new BehaviorSubject([]), recentlyAccessed$: new BehaviorSubject([]), storage: new StubBrowserStorage(), diff --git a/src/core/public/chrome/ui/header/collapsible_nav.tsx b/src/core/public/chrome/ui/header/collapsible_nav.tsx index 5abd14312f4a6..a5f42c0949562 100644 --- a/src/core/public/chrome/ui/header/collapsible_nav.tsx +++ b/src/core/public/chrome/ui/header/collapsible_nav.tsx @@ -81,7 +81,6 @@ interface Props { isLocked: boolean; isOpen: boolean; homeHref: string; - legacyMode: boolean; navLinks$: Rx.Observable; recentlyAccessed$: Rx.Observable; storage?: Storage; @@ -97,7 +96,6 @@ export function CollapsibleNav({ isLocked, isOpen, homeHref, - legacyMode, storage = window.localStorage, onIsLockedUpdate, closeNav, @@ -116,7 +114,6 @@ export function CollapsibleNav({ const readyForEUI = (link: ChromeNavLink, needsIcon: boolean = false) => { return createEuiListItem({ link, - legacyMode, appId, dataTestSubj: 'collapsibleNavAppLink', navigateToApp, @@ -148,7 +145,6 @@ export function CollapsibleNav({ listItems={[ createEuiListItem({ link: customNavLink, - legacyMode, basePath, navigateToApp, dataTestSubj: 'collapsibleNavCustomNavLink', diff --git a/src/core/public/chrome/ui/header/header.test.tsx b/src/core/public/chrome/ui/header/header.test.tsx index a9fa15d43182b..04eb256f30f37 100644 --- a/src/core/public/chrome/ui/header/header.test.tsx +++ b/src/core/public/chrome/ui/header/header.test.tsx @@ -50,7 +50,6 @@ function mockProps() { forceAppSwitcherNavigation$: new BehaviorSubject(false), helpExtension$: new BehaviorSubject(undefined), helpSupportUrl$: new BehaviorSubject(''), - legacyMode: false, navControlsLeft$: new BehaviorSubject([]), navControlsRight$: new BehaviorSubject([]), basePath: http.basePath, @@ -74,13 +73,13 @@ describe('Header', () => { const isLocked$ = new BehaviorSubject(false); const navType$ = new BehaviorSubject('modern' as NavType); const navLinks$ = new BehaviorSubject([ - { id: 'kibana', title: 'kibana', baseUrl: '', legacy: false }, + { id: 'kibana', title: 'kibana', baseUrl: '', href: '' }, ]); const customNavLink$ = new BehaviorSubject({ id: 'cloud-deployment-link', title: 'Manage cloud deployment', baseUrl: '', - legacy: false, + href: '', }); const recentlyAccessed$ = new BehaviorSubject([ { link: '', label: 'dashboard', id: 'dashboard' }, diff --git a/src/core/public/chrome/ui/header/header.tsx b/src/core/public/chrome/ui/header/header.tsx index 0624d66a9598b..c0b3fc72930dc 100644 --- a/src/core/public/chrome/ui/header/header.tsx +++ b/src/core/public/chrome/ui/header/header.tsx @@ -67,7 +67,6 @@ export interface HeaderProps { forceAppSwitcherNavigation$: Observable; helpExtension$: Observable; helpSupportUrl$: Observable; - legacyMode: boolean; navControlsLeft$: Observable; navControlsRight$: Observable; basePath: HttpStart['basePath']; @@ -93,7 +92,6 @@ function renderMenuTrigger(toggleOpen: () => void) { export function Header({ kibanaVersion, kibanaDocLink, - legacyMode, application, basePath, onIsLockedUpdate, @@ -195,7 +193,6 @@ export function Header({ isOpen={isOpen} homeHref={homeHref} basePath={basePath} - legacyMode={legacyMode} navigateToApp={application.navigateToApp} onIsLockedUpdate={onIsLockedUpdate} closeNav={() => { @@ -218,7 +215,6 @@ export function Header({ appId$={application.currentAppId$} 
navigateToApp={application.navigateToApp} ref={navDrawerRef} - legacyMode={legacyMode} /> )} diff --git a/src/core/public/chrome/ui/header/nav_drawer.tsx b/src/core/public/chrome/ui/header/nav_drawer.tsx index ee4bff6cc0ac4..fc080fbafc303 100644 --- a/src/core/public/chrome/ui/header/nav_drawer.tsx +++ b/src/core/public/chrome/ui/header/nav_drawer.tsx @@ -33,7 +33,6 @@ export interface Props { appId$: InternalApplicationStart['currentAppId$']; basePath: HttpStart['basePath']; isLocked?: boolean; - legacyMode: boolean; navLinks$: Observable; recentlyAccessed$: Observable; navigateToApp: CoreStart['application']['navigateToApp']; @@ -41,7 +40,7 @@ export interface Props { } function NavDrawerRenderer( - { isLocked, onIsLockedUpdate, basePath, legacyMode, navigateToApp, ...observables }: Props, + { isLocked, onIsLockedUpdate, basePath, navigateToApp, ...observables }: Props, ref: React.Ref ) { const appId = useObservable(observables.appId$, ''); @@ -67,7 +66,6 @@ function NavDrawerRenderer( listItems={navLinks.map((link) => createEuiListItem({ link, - legacyMode, appId, basePath, navigateToApp, diff --git a/src/core/public/chrome/ui/header/nav_link.tsx b/src/core/public/chrome/ui/header/nav_link.tsx index c70a40f49643e..04d9c5bf7a10a 100644 --- a/src/core/public/chrome/ui/header/nav_link.tsx +++ b/src/core/public/chrome/ui/header/nav_link.tsx @@ -29,7 +29,6 @@ export const isModifiedOrPrevented = (event: React.MouseEvent {}, @@ -52,12 +50,7 @@ export function createEuiListItem({ dataTestSubj, externalLink = false, }: Props) { - const { legacy, active, id, title, disabled, euiIconType, icon, tooltip } = link; - let { href } = link; - - if (legacy) { - href = link.url && !active ? link.url : link.baseUrl; - } + const { href, id, title, disabled, euiIconType, icon, tooltip } = link; return { label: tooltip ?? title, @@ -70,8 +63,6 @@ export function createEuiListItem({ if ( !externalLink && // ignore external links - !legacyMode && // ignore when in legacy mode - !legacy && // ignore links to legacy apps event.button === 0 && // ignore everything but left clicks !isModifiedOrPrevented(event) ) { @@ -79,8 +70,7 @@ export function createEuiListItem({ navigateToApp(id); } }, - // Legacy apps use `active` property, NP apps should match the current app - isActive: active || appId === id, + isActive: appId === id, isDisabled: disabled, 'data-test-subj': dataTestSubj, ...(basePath && { @@ -116,7 +106,7 @@ export function createRecentNavLink( ) { const { link, label } = recentLink; const href = relativeToAbsolute(basePath.prepend(link)); - const navLink = navLinks.find((nl) => href.startsWith(nl.baseUrl ?? 
nl.subUrlBase)); + const navLink = navLinks.find((nl) => href.startsWith(nl.baseUrl)); let titleAndAriaLabel = label; if (navLink) { diff --git a/src/core/public/core_app/status/lib/load_status.test.ts b/src/core/public/core_app/status/lib/load_status.test.ts index 3a444a4448467..5a9f982e106a7 100644 --- a/src/core/public/core_app/status/lib/load_status.test.ts +++ b/src/core/public/core_app/status/lib/load_status.test.ts @@ -57,6 +57,7 @@ const mockedResponse: StatusResponse = { ], }, metrics: { + collected_at: new Date('2020-01-01 01:00:00'), collection_interval_in_millis: 1000, os: { platform: 'darwin' as const, diff --git a/src/core/public/core_app/styles/_globals_v7dark.scss b/src/core/public/core_app/styles/_globals_v7dark.scss index 8ac841aab8469..9a4a965d63a38 100644 --- a/src/core/public/core_app/styles/_globals_v7dark.scss +++ b/src/core/public/core_app/styles/_globals_v7dark.scss @@ -3,9 +3,6 @@ // prepended to all .scss imports (from JS, when v7dark theme selected) @import '@elastic/eui/src/themes/eui/eui_colors_dark'; - -@import '@elastic/eui/src/global_styling/functions/index'; -@import '@elastic/eui/src/global_styling/variables/index'; -@import '@elastic/eui/src/global_styling/mixins/index'; +@import '@elastic/eui/src/themes/eui/eui_globals'; @import './mixins'; diff --git a/src/core/public/core_app/styles/_globals_v7light.scss b/src/core/public/core_app/styles/_globals_v7light.scss index 701bbdfe03662..ddb4b5b31fa1f 100644 --- a/src/core/public/core_app/styles/_globals_v7light.scss +++ b/src/core/public/core_app/styles/_globals_v7light.scss @@ -3,9 +3,6 @@ // prepended to all .scss imports (from JS, when v7light theme selected) @import '@elastic/eui/src/themes/eui/eui_colors_light'; - -@import '@elastic/eui/src/global_styling/functions/index'; -@import '@elastic/eui/src/global_styling/variables/index'; -@import '@elastic/eui/src/global_styling/mixins/index'; +@import '@elastic/eui/src/themes/eui/eui_globals'; @import './mixins'; diff --git a/src/core/public/core_app/styles/_globals_v8dark.scss b/src/core/public/core_app/styles/_globals_v8dark.scss index 972365e9e9d0e..9ad9108f350ff 100644 --- a/src/core/public/core_app/styles/_globals_v8dark.scss +++ b/src/core/public/core_app/styles/_globals_v8dark.scss @@ -3,14 +3,6 @@ // prepended to all .scss imports (from JS, when v8dark theme selected) @import '@elastic/eui/src/themes/eui-amsterdam/eui_amsterdam_colors_dark'; - -@import '@elastic/eui/src/global_styling/functions/index'; -@import '@elastic/eui/src/themes/eui-amsterdam/global_styling/functions/index'; - -@import '@elastic/eui/src/global_styling/variables/index'; -@import '@elastic/eui/src/themes/eui-amsterdam/global_styling/variables/index'; - -@import '@elastic/eui/src/global_styling/mixins/index'; -@import '@elastic/eui/src/themes/eui-amsterdam/global_styling/mixins/index'; +@import '@elastic/eui/src/themes/eui-amsterdam/eui_amsterdam_globals'; @import './mixins'; diff --git a/src/core/public/core_app/styles/_globals_v8light.scss b/src/core/public/core_app/styles/_globals_v8light.scss index dc99f4d45082e..a6b2cb84c2062 100644 --- a/src/core/public/core_app/styles/_globals_v8light.scss +++ b/src/core/public/core_app/styles/_globals_v8light.scss @@ -3,14 +3,6 @@ // prepended to all .scss imports (from JS, when v8light theme selected) @import '@elastic/eui/src/themes/eui-amsterdam/eui_amsterdam_colors_light'; - -@import '@elastic/eui/src/global_styling/functions/index'; -@import '@elastic/eui/src/themes/eui-amsterdam/global_styling/functions/index'; - -@import 
'@elastic/eui/src/global_styling/variables/index'; -@import '@elastic/eui/src/themes/eui-amsterdam/global_styling/variables/index'; - -@import '@elastic/eui/src/global_styling/mixins/index'; -@import '@elastic/eui/src/themes/eui-amsterdam/global_styling/mixins/index'; +@import '@elastic/eui/src/themes/eui-amsterdam/eui_amsterdam_globals'; @import './mixins'; diff --git a/src/core/public/core_system.test.mocks.ts b/src/core/public/core_system.test.mocks.ts index b5b99418b44b4..d0e457386ffca 100644 --- a/src/core/public/core_system.test.mocks.ts +++ b/src/core/public/core_system.test.mocks.ts @@ -23,7 +23,6 @@ import { fatalErrorsServiceMock } from './fatal_errors/fatal_errors_service.mock import { httpServiceMock } from './http/http_service.mock'; import { i18nServiceMock } from './i18n/i18n_service.mock'; import { injectedMetadataServiceMock } from './injected_metadata/injected_metadata_service.mock'; -import { legacyPlatformServiceMock } from './legacy/legacy_service.mock'; import { notificationServiceMock } from './notifications/notifications_service.mock'; import { overlayServiceMock } from './overlays/overlay_service.mock'; import { pluginsServiceMock } from './plugins/plugins_service.mock'; @@ -34,14 +33,6 @@ import { contextServiceMock } from './context/context_service.mock'; import { integrationsServiceMock } from './integrations/integrations_service.mock'; import { coreAppMock } from './core_app/core_app.mock'; -export const MockLegacyPlatformService = legacyPlatformServiceMock.create(); -export const LegacyPlatformServiceConstructor = jest - .fn() - .mockImplementation(() => MockLegacyPlatformService); -jest.doMock('./legacy', () => ({ - LegacyPlatformService: LegacyPlatformServiceConstructor, -})); - export const MockInjectedMetadataService = injectedMetadataServiceMock.create(); export const InjectedMetadataServiceConstructor = jest .fn() diff --git a/src/core/public/core_system.test.ts b/src/core/public/core_system.test.ts index 4c1993c90a2e1..213237309c30b 100644 --- a/src/core/public/core_system.test.ts +++ b/src/core/public/core_system.test.ts @@ -23,13 +23,11 @@ import { HttpServiceConstructor, I18nServiceConstructor, InjectedMetadataServiceConstructor, - LegacyPlatformServiceConstructor, MockChromeService, MockFatalErrorsService, MockHttpService, MockI18nService, MockInjectedMetadataService, - MockLegacyPlatformService, MockNotificationsService, MockOverlayService, MockPluginsService, @@ -80,7 +78,6 @@ describe('constructor', () => { createCoreSystem(); expect(InjectedMetadataServiceConstructor).toHaveBeenCalledTimes(1); - expect(LegacyPlatformServiceConstructor).toHaveBeenCalledTimes(1); expect(I18nServiceConstructor).toHaveBeenCalledTimes(1); expect(FatalErrorsServiceConstructor).toHaveBeenCalledTimes(1); expect(NotificationServiceConstructor).toHaveBeenCalledTimes(1); @@ -106,25 +103,6 @@ describe('constructor', () => { }); }); - it('passes required params to LegacyPlatformService', () => { - const requireLegacyFiles = { requireLegacyFiles: true }; - const requireLegacyBootstrapModule = { requireLegacyBootstrapModule: true }; - const requireNewPlatformShimModule = { requireNewPlatformShimModule: true }; - - createCoreSystem({ - requireLegacyFiles, - requireLegacyBootstrapModule, - requireNewPlatformShimModule, - }); - - expect(LegacyPlatformServiceConstructor).toHaveBeenCalledTimes(1); - expect(LegacyPlatformServiceConstructor).toHaveBeenCalledWith({ - requireLegacyFiles, - requireLegacyBootstrapModule, - requireNewPlatformShimModule, - }); - }); - it('passes 
browserSupportsCsp to ChromeService', () => { createCoreSystem(); @@ -190,7 +168,6 @@ describe('#setup()', () => { pluginDependencies: new Map([ [pluginA, []], [pluginB, [pluginA]], - [MockLegacyPlatformService.legacyId, [pluginA, pluginB]], ]), }); }); @@ -301,11 +278,6 @@ describe('#start()', () => { expect(MockPluginsService.start).toHaveBeenCalledTimes(1); }); - it('calls legacyPlatform#start()', async () => { - await startCore(); - expect(MockLegacyPlatformService.start).toHaveBeenCalledTimes(1); - }); - it('calls overlays#start()', async () => { await startCore(); expect(MockOverlayService.start).toHaveBeenCalledTimes(1); @@ -317,7 +289,6 @@ describe('#start()', () => { expect(MockRenderingService.start).toHaveBeenCalledWith({ application: expect.any(Object), chrome: expect.any(Object), - injectedMetadata: expect.any(Object), overlays: expect.any(Object), targetDomElement: expect.any(HTMLElement), }); @@ -335,14 +306,6 @@ describe('#start()', () => { }); describe('#stop()', () => { - it('calls legacyPlatform.stop()', () => { - const coreSystem = createCoreSystem(); - - expect(MockLegacyPlatformService.stop).not.toHaveBeenCalled(); - coreSystem.stop(); - expect(MockLegacyPlatformService.stop).toHaveBeenCalled(); - }); - it('calls notifications.stop()', () => { const coreSystem = createCoreSystem(); @@ -422,7 +385,6 @@ describe('RenderingService targetDomElement', () => { let targetDomElementParentInStart: HTMLElement | null; MockRenderingService.start.mockImplementation(({ targetDomElement }) => { targetDomElementParentInStart = targetDomElement.parentElement; - return { legacyTargetDomElement: document.createElement('div') }; }); // Starting the core system should pass the targetDomElement as a child of the rootDomElement @@ -432,24 +394,6 @@ describe('RenderingService targetDomElement', () => { }); }); -describe('LegacyPlatformService targetDomElement', () => { - it('only mounts the element when start, after setting up the legacyPlatformService', async () => { - const core = createCoreSystem(); - - let targetDomElementInStart: HTMLElement | undefined; - MockLegacyPlatformService.start.mockImplementation(({ targetDomElement }) => { - targetDomElementInStart = targetDomElement; - }); - - await core.setup(); - await core.start(); - // Starting the core system should pass the legacyTargetDomElement to the LegacyPlatformService - const renderingLegacyTargetDomElement = - MockRenderingService.start.mock.results[0].value.legacyTargetDomElement; - expect(targetDomElementInStart!).toBe(renderingLegacyTargetDomElement); - }); -}); - describe('Notifications targetDomElement', () => { it('only mounts the element when started, after setting up the notificationsService', async () => { const rootDomElement = document.createElement('div'); diff --git a/src/core/public/core_system.ts b/src/core/public/core_system.ts index e08841b0271d9..006d0036f7a12 100644 --- a/src/core/public/core_system.ts +++ b/src/core/public/core_system.ts @@ -30,13 +30,12 @@ import { InjectedMetadataSetup, InjectedMetadataStart, } from './injected_metadata'; -import { LegacyPlatformParams, LegacyPlatformService } from './legacy'; import { NotificationsService } from './notifications'; import { OverlayService } from './overlays'; import { PluginsService } from './plugins'; import { UiSettingsService } from './ui_settings'; import { ApplicationService } from './application'; -import { mapToObject, pick } from '../utils/'; +import { pick } from '../utils/'; import { DocLinksService } from './doc_links'; import { 
RenderingService } from './rendering'; import { SavedObjectsService } from './saved_objects'; @@ -49,9 +48,6 @@ interface Params { rootDomElement: HTMLElement; browserSupportsCsp: boolean; injectedMetadata: InjectedMetadataParams['injectedMetadata']; - requireLegacyFiles?: LegacyPlatformParams['requireLegacyFiles']; - requireLegacyBootstrapModule?: LegacyPlatformParams['requireLegacyBootstrapModule']; - requireNewPlatformShimModule?: LegacyPlatformParams['requireNewPlatformShimModule']; } /** @internal */ @@ -86,7 +82,6 @@ export interface InternalCoreStart extends Omit { export class CoreSystem { private readonly fatalErrors: FatalErrorsService; private readonly injectedMetadata: InjectedMetadataService; - private readonly legacy: LegacyPlatformService; private readonly notifications: NotificationsService; private readonly http: HttpService; private readonly savedObjects: SavedObjectsService; @@ -107,14 +102,7 @@ export class CoreSystem { private fatalErrorsSetup: FatalErrorsSetup | null = null; constructor(params: Params) { - const { - rootDomElement, - browserSupportsCsp, - injectedMetadata, - requireLegacyFiles, - requireLegacyBootstrapModule, - requireNewPlatformShimModule, - } = params; + const { rootDomElement, browserSupportsCsp, injectedMetadata } = params; this.rootDomElement = rootDomElement; @@ -145,12 +133,6 @@ export class CoreSystem { this.context = new ContextService(this.coreContext); this.plugins = new PluginsService(this.coreContext, injectedMetadata.uiPlugins); this.coreApp = new CoreApp(this.coreContext); - - this.legacy = new LegacyPlatformService({ - requireLegacyFiles, - requireLegacyBootstrapModule, - requireNewPlatformShimModule, - }); } public async setup() { @@ -170,16 +152,9 @@ export class CoreSystem { const pluginDependencies = this.plugins.getOpaqueIds(); const context = this.context.setup({ - // We inject a fake "legacy plugin" with dependencies on every plugin so that legacy plugins: - // 1) Can access context from any NP plugin - // 2) Can register context providers that will only be available to other legacy plugins and will not leak into - // New Platform plugins. 
- pluginDependencies: new Map([ - ...pluginDependencies, - [this.legacy.legacyId, [...pluginDependencies.keys()]], - ]), + pluginDependencies: new Map([...pluginDependencies]), }); - const application = this.application.setup({ context, http, injectedMetadata }); + const application = this.application.setup({ context, http }); this.coreApp.setup({ application, http, injectedMetadata, notifications }); const core: InternalCoreSetup = { @@ -193,12 +168,7 @@ export class CoreSystem { }; // Services that do not expose contracts at setup - const plugins = await this.plugins.setup(core); - - await this.legacy.setup({ - core, - plugins: mapToObject(plugins.contracts), - }); + await this.plugins.setup(core); return { fatalErrors: this.fatalErrorsSetup }; } catch (error) { @@ -277,7 +247,7 @@ export class CoreSystem { fatalErrors, }; - const plugins = await this.plugins.start(core); + await this.plugins.start(core); // ensure the rootDomElement is empty this.rootDomElement.textContent = ''; @@ -286,20 +256,13 @@ export class CoreSystem { this.rootDomElement.appendChild(notificationsTargetDomElement); this.rootDomElement.appendChild(overlayTargetDomElement); - const rendering = this.rendering.start({ + this.rendering.start({ application, chrome, - injectedMetadata, overlays, targetDomElement: coreUiTargetDomElement, }); - await this.legacy.start({ - core, - plugins: mapToObject(plugins.contracts), - targetDomElement: rendering.legacyTargetDomElement, - }); - return { application, }; @@ -315,7 +278,6 @@ export class CoreSystem { } public stop() { - this.legacy.stop(); this.plugins.stop(); this.coreApp.stop(); this.notifications.stop(); diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts index fc753517fd940..95ac8bba57049 100644 --- a/src/core/public/doc_links/doc_links_service.ts +++ b/src/core/public/doc_links/doc_links_service.ts @@ -129,7 +129,7 @@ export class DocLinksService { }, visualize: { guide: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/visualize.html`, - timelionDeprecation: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/timelion.html#timelion-deprecation`, + timelionDeprecation: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/dashboard.html#timelion-deprecation`, }, }, }); diff --git a/src/core/public/index.ts b/src/core/public/index.ts index 9176a277b3f43..a9774dafd2340 100644 --- a/src/core/public/index.ts +++ b/src/core/public/index.ts @@ -61,7 +61,6 @@ import { import { FatalErrorsSetup, FatalErrorsStart, FatalErrorInfo } from './fatal_errors'; import { HttpSetup, HttpStart } from './http'; import { I18nStart } from './i18n'; -import { InjectedMetadataSetup, InjectedMetadataStart, LegacyNavLink } from './injected_metadata'; import { NotificationsSetup, NotificationsStart } from './notifications'; import { OverlayStart } from './overlays'; import { Plugin, PluginInitializer, PluginInitializerContext, PluginOpaqueId } from './plugins'; @@ -106,7 +105,6 @@ export { ApplicationStart, App, PublicAppInfo, - AppBase, AppMount, AppMountDeprecated, AppUnmount, @@ -122,8 +120,6 @@ export { AppUpdatableFields, AppUpdater, ScopedHistory, - LegacyApp, - PublicLegacyAppInfo, NavigateToAppOptions, } from './application'; @@ -230,7 +226,7 @@ export interface CoreSetup { - /** @deprecated */ - injectedMetadata: InjectedMetadataSetup; -} - -/** - * Start interface exposed to the legacy platform via the `ui/new_platform` module. 
- * - * @remarks - * Some methods are not supported in the legacy platform and while present to make this type compatibile with - * {@link CoreStart}, unsupported methods will throw exceptions when called. - * - * @public - * @deprecated - */ -export interface LegacyCoreStart extends CoreStart { - /** @deprecated */ - injectedMetadata: InjectedMetadataStart; -} - export { Capabilities, ChromeBadge, @@ -356,7 +322,6 @@ export { HttpSetup, HttpStart, I18nStart, - LegacyNavLink, NotificationsSetup, NotificationsStart, Plugin, diff --git a/src/core/public/injected_metadata/index.ts b/src/core/public/injected_metadata/index.ts index cebd0f017de69..925eeab187535 100644 --- a/src/core/public/injected_metadata/index.ts +++ b/src/core/public/injected_metadata/index.ts @@ -23,5 +23,4 @@ export { InjectedMetadataSetup, InjectedMetadataStart, InjectedPluginMetadata, - LegacyNavLink, } from './injected_metadata_service'; diff --git a/src/core/public/injected_metadata/injected_metadata_service.mock.ts b/src/core/public/injected_metadata/injected_metadata_service.mock.ts index e6b1c440519bd..3bb4358406246 100644 --- a/src/core/public/injected_metadata/injected_metadata_service.mock.ts +++ b/src/core/public/injected_metadata/injected_metadata_service.mock.ts @@ -25,7 +25,6 @@ const createSetupContractMock = () => { getKibanaVersion: jest.fn(), getKibanaBranch: jest.fn(), getCspConfig: jest.fn(), - getLegacyMode: jest.fn(), getAnonymousStatusPage: jest.fn(), getLegacyMetadata: jest.fn(), getPlugins: jest.fn(), @@ -35,7 +34,6 @@ const createSetupContractMock = () => { }; setupContract.getCspConfig.mockReturnValue({ warnLegacyBrowsers: true }); setupContract.getKibanaVersion.mockReturnValue('kibanaVersion'); - setupContract.getLegacyMode.mockReturnValue(true); setupContract.getAnonymousStatusPage.mockReturnValue(false); setupContract.getLegacyMetadata.mockReturnValue({ app: { diff --git a/src/core/public/injected_metadata/injected_metadata_service.ts b/src/core/public/injected_metadata/injected_metadata_service.ts index db4bfdf415bcc..23630a5bcf228 100644 --- a/src/core/public/injected_metadata/injected_metadata_service.ts +++ b/src/core/public/injected_metadata/injected_metadata_service.ts @@ -28,17 +28,6 @@ import { import { deepFreeze } from '../../utils/'; import { AppCategory } from '../'; -/** @public */ -export interface LegacyNavLink { - id: string; - category?: AppCategory; - title: string; - order: number; - url: string; - icon?: string; - euiIconType?: string; -} - export interface InjectedPluginMetadata { id: PluginName; plugin: DiscoveredPlugin; @@ -67,7 +56,6 @@ export interface InjectedMetadataParams { packageInfo: Readonly; }; uiPlugins: InjectedPluginMetadata[]; - legacyMode: boolean; anonymousStatusPage: boolean; legacyMetadata: { app: { @@ -75,7 +63,6 @@ export interface InjectedMetadataParams { title: string; }; bundleId: string; - nav: LegacyNavLink[]; version: string; branch: string; buildNum: number; @@ -137,10 +124,6 @@ export class InjectedMetadataService { return this.state.uiPlugins; }, - getLegacyMode: () => { - return this.state.legacyMode; - }, - getLegacyMetadata: () => { return this.state.legacyMetadata; }, @@ -182,8 +165,6 @@ export interface InjectedMetadataSetup { * An array of frontend plugins in topological order. */ getPlugins: () => InjectedPluginMetadata[]; - /** Indicates whether or not we are rendering a known legacy app. 
*/ - getLegacyMode: () => boolean; getAnonymousStatusPage: () => boolean; getLegacyMetadata: () => { app: { @@ -191,7 +172,6 @@ export interface InjectedMetadataSetup { title: string; }; bundleId: string; - nav: LegacyNavLink[]; version: string; branch: string; buildNum: number; diff --git a/src/core/public/legacy/index.ts b/src/core/public/legacy/index.ts deleted file mode 100644 index 1ea43d8deebbc..0000000000000 --- a/src/core/public/legacy/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { LegacyPlatformService, LegacyPlatformParams } from './legacy_service'; diff --git a/src/core/public/legacy/legacy_service.mock.ts b/src/core/public/legacy/legacy_service.mock.ts deleted file mode 100644 index 0c8d9682185d5..0000000000000 --- a/src/core/public/legacy/legacy_service.mock.ts +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -import { LegacyPlatformService } from './legacy_service'; - -// Use Required to get only public properties -type LegacyPlatformServiceContract = Required; -const createMock = () => { - const mocked: jest.Mocked = { - legacyId: Symbol(), - setup: jest.fn(), - start: jest.fn(), - stop: jest.fn(), - }; - return mocked; -}; - -export const legacyPlatformServiceMock = { - create: createMock, -}; diff --git a/src/core/public/legacy/legacy_service.test.ts b/src/core/public/legacy/legacy_service.test.ts deleted file mode 100644 index cb29abc9b0ccc..0000000000000 --- a/src/core/public/legacy/legacy_service.test.ts +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import angular from 'angular'; - -import { chromeServiceMock } from '../chrome/chrome_service.mock'; -import { fatalErrorsServiceMock } from '../fatal_errors/fatal_errors_service.mock'; -import { httpServiceMock } from '../http/http_service.mock'; -import { i18nServiceMock } from '../i18n/i18n_service.mock'; -import { injectedMetadataServiceMock } from '../injected_metadata/injected_metadata_service.mock'; -import { notificationServiceMock } from '../notifications/notifications_service.mock'; -import { overlayServiceMock } from '../overlays/overlay_service.mock'; -import { uiSettingsServiceMock } from '../ui_settings/ui_settings_service.mock'; -import { LegacyPlatformService } from './legacy_service'; -import { applicationServiceMock } from '../application/application_service.mock'; -import { docLinksServiceMock } from '../doc_links/doc_links_service.mock'; -import { savedObjectsServiceMock } from '../saved_objects/saved_objects_service.mock'; -import { contextServiceMock } from '../context/context_service.mock'; - -const applicationSetup = applicationServiceMock.createInternalSetupContract(); -const contextSetup = contextServiceMock.createSetupContract(); -const docLinksSetup = docLinksServiceMock.createSetupContract(); -const fatalErrorsSetup = fatalErrorsServiceMock.createSetupContract(); -const httpSetup = httpServiceMock.createSetupContract(); -const injectedMetadataSetup = injectedMetadataServiceMock.createSetupContract(); -const notificationsSetup = notificationServiceMock.createSetupContract(); -const uiSettingsSetup = uiSettingsServiceMock.createSetupContract(); - -const mockLoadOrder: string[] = []; -const mockUiNewPlatformSetup = jest.fn(); -const mockUiNewPlatformStart = jest.fn(); -const mockUiChromeBootstrap = jest.fn(); -const defaultParams = { - requireLegacyFiles: jest.fn(() => { - mockLoadOrder.push('legacy files'); - }), - requireLegacyBootstrapModule: jest.fn(() => { - mockLoadOrder.push('ui/chrome'); - return { - bootstrap: mockUiChromeBootstrap, - }; - }), - requireNewPlatformShimModule: jest.fn(() => ({ - __setup__: mockUiNewPlatformSetup, - __start__: mockUiNewPlatformStart, - })), -}; - -const defaultSetupDeps = { - core: { - application: applicationSetup, - context: contextSetup, - docLinks: docLinksSetup, - fatalErrors: fatalErrorsSetup, - injectedMetadata: injectedMetadataSetup, - notifications: notificationsSetup, - http: httpSetup, - uiSettings: uiSettingsSetup, - }, - plugins: {}, -}; - -const applicationStart = applicationServiceMock.createInternalStartContract(); -const docLinksStart = docLinksServiceMock.createStartContract(); -const httpStart = httpServiceMock.createStartContract(); -const chromeStart = chromeServiceMock.createStartContract(); -const i18nStart = i18nServiceMock.createStartContract(); -const injectedMetadataStart = injectedMetadataServiceMock.createStartContract(); -const notificationsStart = notificationServiceMock.createStartContract(); -const overlayStart = overlayServiceMock.createStartContract(); -const uiSettingsStart = uiSettingsServiceMock.createStartContract(); -const savedObjectsStart = 
savedObjectsServiceMock.createStartContract(); -const fatalErrorsStart = fatalErrorsServiceMock.createStartContract(); -const mockStorage = { getItem: jest.fn() } as any; - -const defaultStartDeps = { - core: { - application: applicationStart, - docLinks: docLinksStart, - http: httpStart, - chrome: chromeStart, - i18n: i18nStart, - injectedMetadata: injectedMetadataStart, - notifications: notificationsStart, - overlays: overlayStart, - uiSettings: uiSettingsStart, - savedObjects: savedObjectsStart, - fatalErrors: fatalErrorsStart, - }, - lastSubUrlStorage: mockStorage, - targetDomElement: document.createElement('div'), - plugins: {}, -}; - -afterEach(() => { - jest.clearAllMocks(); - jest.resetModules(); - mockLoadOrder.length = 0; -}); - -describe('#setup()', () => { - describe('default', () => { - it('initializes new platform shim module with core APIs', () => { - const legacyPlatform = new LegacyPlatformService({ - ...defaultParams, - }); - - legacyPlatform.setup(defaultSetupDeps); - - expect(mockUiNewPlatformSetup).toHaveBeenCalledTimes(1); - expect(mockUiNewPlatformSetup).toHaveBeenCalledWith(expect.any(Object), {}); - }); - - it('throws error if requireNewPlatformShimModule is undefined', () => { - const legacyPlatform = new LegacyPlatformService({ - ...defaultParams, - requireNewPlatformShimModule: undefined, - }); - - expect(() => { - legacyPlatform.setup(defaultSetupDeps); - }).toThrowErrorMatchingInlineSnapshot( - `"requireNewPlatformShimModule must be specified when rendering a legacy application"` - ); - - expect(mockUiNewPlatformSetup).not.toHaveBeenCalled(); - }); - }); -}); - -describe('#start()', () => { - it('fetches and sets legacy lastSubUrls', () => { - chromeStart.navLinks.getAll.mockReturnValue([ - { id: 'link1', baseUrl: 'http://wowza.com/app1', legacy: true } as any, - ]); - mockStorage.getItem.mockReturnValue('http://wowza.com/app1/subUrl'); - const legacyPlatform = new LegacyPlatformService({ - ...defaultParams, - }); - - legacyPlatform.setup(defaultSetupDeps); - legacyPlatform.start({ ...defaultStartDeps, lastSubUrlStorage: mockStorage }); - - expect(chromeStart.navLinks.update).toHaveBeenCalledWith('link1', { - url: 'http://wowza.com/app1/subUrl', - }); - }); - - it('initializes ui/new_platform with core APIs', () => { - const legacyPlatform = new LegacyPlatformService({ - ...defaultParams, - }); - - legacyPlatform.setup(defaultSetupDeps); - legacyPlatform.start(defaultStartDeps); - - expect(mockUiNewPlatformStart).toHaveBeenCalledTimes(1); - expect(mockUiNewPlatformStart).toHaveBeenCalledWith(expect.any(Object), {}); - }); - - it('throws error if requireNewPlatformShimeModule is undefined', () => { - const legacyPlatform = new LegacyPlatformService({ - ...defaultParams, - requireNewPlatformShimModule: undefined, - }); - - expect(() => { - legacyPlatform.start(defaultStartDeps); - }).toThrowErrorMatchingInlineSnapshot( - `"requireNewPlatformShimModule must be specified when rendering a legacy application"` - ); - - expect(mockUiNewPlatformStart).not.toHaveBeenCalled(); - }); - - it('resolves getStartServices with core and plugin APIs', async () => { - const legacyPlatform = new LegacyPlatformService({ - ...defaultParams, - }); - - legacyPlatform.setup(defaultSetupDeps); - legacyPlatform.start(defaultStartDeps); - - const { getStartServices } = mockUiNewPlatformSetup.mock.calls[0][0]; - const [coreStart, pluginsStart] = await getStartServices(); - expect(coreStart).toEqual(expect.any(Object)); - expect(pluginsStart).toBe(defaultStartDeps.plugins); - }); - - 
it('passes the targetDomElement to legacy bootstrap module', () => { - const legacyPlatform = new LegacyPlatformService({ - ...defaultParams, - }); - - legacyPlatform.setup(defaultSetupDeps); - legacyPlatform.start(defaultStartDeps); - - expect(mockUiChromeBootstrap).toHaveBeenCalledTimes(1); - expect(mockUiChromeBootstrap).toHaveBeenCalledWith(defaultStartDeps.targetDomElement); - }); - - describe('load order', () => { - it('loads ui/modules before ui/chrome, and both before legacy files', () => { - const legacyPlatform = new LegacyPlatformService({ - ...defaultParams, - }); - - expect(mockLoadOrder).toEqual([]); - - legacyPlatform.setup(defaultSetupDeps); - legacyPlatform.start(defaultStartDeps); - - expect(mockLoadOrder).toMatchInlineSnapshot(` - Array [ - "ui/chrome", - "legacy files", - ] - `); - }); - }); -}); - -describe('#stop()', () => { - it('does nothing if angular was not bootstrapped to targetDomElement', () => { - const targetDomElement = document.createElement('div'); - targetDomElement.innerHTML = ` -

<h1>this should not be removed</h1>

- `; - - const legacyPlatform = new LegacyPlatformService({ - ...defaultParams, - }); - - legacyPlatform.stop(); - expect(targetDomElement).toMatchInlineSnapshot(` -
<div>
-   <h1>
-     this should not be removed
-   </h1>
- </div>
- `); - }); - - it('destroys the angular scope and empties the targetDomElement if angular is bootstrapped to targetDomElement', async () => { - const targetDomElement = document.createElement('div'); - const scopeDestroySpy = jest.fn(); - - const legacyPlatform = new LegacyPlatformService({ - ...defaultParams, - }); - - // simulate bootstrapping with a module "foo" - angular.module('foo', []).directive('bar', () => ({ - restrict: 'E', - link($scope) { - $scope.$on('$destroy', scopeDestroySpy); - }, - })); - - targetDomElement.innerHTML = ` - <bar></bar> - `; - - angular.bootstrap(targetDomElement, ['foo']); - - await legacyPlatform.setup(defaultSetupDeps); - legacyPlatform.start({ ...defaultStartDeps, targetDomElement }); - legacyPlatform.stop(); - - expect(targetDomElement).toMatchInlineSnapshot(` -
- `); - expect(scopeDestroySpy).toHaveBeenCalledTimes(1); - }); -}); diff --git a/src/core/public/legacy/legacy_service.ts b/src/core/public/legacy/legacy_service.ts deleted file mode 100644 index 78a9219f3d694..0000000000000 --- a/src/core/public/legacy/legacy_service.ts +++ /dev/null @@ -1,204 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import angular from 'angular'; -import { first } from 'rxjs/operators'; -import { Subject } from 'rxjs'; -import { InternalCoreSetup, InternalCoreStart } from '../core_system'; -import { LegacyCoreSetup, LegacyCoreStart, MountPoint } from '../'; - -/** @internal */ -export interface LegacyPlatformParams { - requireLegacyFiles?: () => void; - requireLegacyBootstrapModule?: () => BootstrapModule; - requireNewPlatformShimModule?: () => { - __setup__: (legacyCore: LegacyCoreSetup, plugins: Record) => void; - __start__: (legacyCore: LegacyCoreStart, plugins: Record) => void; - }; -} - -interface SetupDeps { - core: InternalCoreSetup; - plugins: Record; -} - -interface StartDeps { - core: InternalCoreStart; - plugins: Record; - lastSubUrlStorage?: Storage; - targetDomElement?: HTMLElement; -} - -interface BootstrapModule { - bootstrap: MountPoint; -} - -/** - * The LegacyPlatformService is responsible for initializing - * the legacy platform by injecting parts of the new platform - * services into the legacy platform modules, like ui/modules, - * and then bootstrapping the ui/chrome or ~~ui/test_harness~~ to - * setup either the app or browser tests. - */ -export class LegacyPlatformService { - /** Symbol to represent the legacy platform as a fake "plugin". Used by the ContextService */ - public readonly legacyId = Symbol(); - private bootstrapModule?: BootstrapModule; - private targetDomElement?: HTMLElement; - private readonly startDependencies$ = new Subject<[LegacyCoreStart, object, {}]>(); - private readonly startDependencies = this.startDependencies$.pipe(first()).toPromise(); - - constructor(private readonly params: LegacyPlatformParams) {} - - public setup({ core, plugins }: SetupDeps) { - // Always register legacy apps, even if not in legacy mode. 
- core.injectedMetadata.getLegacyMetadata().nav.forEach((navLink: any) => - core.application.registerLegacyApp({ - id: navLink.id, - order: navLink.order, - title: navLink.title, - euiIconType: navLink.euiIconType, - icon: navLink.icon, - appUrl: navLink.url, - subUrlBase: navLink.subUrlBase, - linkToLastSubUrl: navLink.linkToLastSubUrl, - category: navLink.category, - disableSubUrlTracking: navLink.disableSubUrlTracking, - }) - ); - - const legacyCore: LegacyCoreSetup = { - ...core, - getStartServices: () => this.startDependencies, - application: { - ...core.application, - register: notSupported(`core.application.register()`), - registerMountContext: notSupported(`core.application.registerMountContext()`), - }, - }; - - // Inject parts of the new platform into parts of the legacy platform - // so that legacy APIs/modules can mimic their new platform counterparts - if (core.injectedMetadata.getLegacyMode()) { - if (!this.params.requireNewPlatformShimModule) { - throw new Error( - `requireNewPlatformShimModule must be specified when rendering a legacy application` - ); - } - - this.params.requireNewPlatformShimModule().__setup__(legacyCore, plugins); - } - } - - public start({ - core, - targetDomElement, - plugins, - lastSubUrlStorage = window.sessionStorage, - }: StartDeps) { - // Initialize legacy sub urls - core.chrome.navLinks - .getAll() - .filter((link) => link.legacy) - .forEach((navLink) => { - const lastSubUrl = lastSubUrlStorage.getItem(`lastSubUrl:${navLink.baseUrl}`); - core.chrome.navLinks.update(navLink.id, { - url: lastSubUrl || navLink.url || navLink.baseUrl, - }); - }); - - // Only import and bootstrap legacy platform if we're in legacy mode. - if (!core.injectedMetadata.getLegacyMode()) { - return; - } - - const legacyCore: LegacyCoreStart = { - ...core, - application: { - applications$: core.application.applications$, - currentAppId$: core.application.currentAppId$, - capabilities: core.application.capabilities, - getUrlForApp: core.application.getUrlForApp, - navigateToApp: core.application.navigateToApp, - navigateToUrl: core.application.navigateToUrl, - registerMountContext: notSupported(`core.application.registerMountContext()`), - }, - }; - - this.startDependencies$.next([legacyCore, plugins, {}]); - - if (!this.params.requireNewPlatformShimModule) { - throw new Error( - `requireNewPlatformShimModule must be specified when rendering a legacy application` - ); - } - if (!this.params.requireLegacyBootstrapModule) { - throw new Error( - `requireLegacyBootstrapModule must be specified when rendering a legacy application` - ); - } - - // Inject parts of the new platform into parts of the legacy platform - // so that legacy APIs/modules can mimic their new platform counterparts - this.params.requireNewPlatformShimModule().__start__(legacyCore, plugins); - - // Load the bootstrap module before loading the legacy platform files so that - // the bootstrap module can modify the environment a bit first - this.bootstrapModule = this.params.requireLegacyBootstrapModule(); - - // require the files that will tie into the legacy platform - if (this.params.requireLegacyFiles) { - this.params.requireLegacyFiles(); - } - - if (!this.bootstrapModule) { - throw new Error('Bootstrap module must be loaded before `start`'); - } - - this.targetDomElement = targetDomElement; - - // `targetDomElement` is always defined when in legacy mode - this.bootstrapModule.bootstrap(this.targetDomElement!); - } - - public stop() { - if (!this.targetDomElement) { - return; - } - - const angularRoot = 
angular.element(this.targetDomElement); - const injector$ = angularRoot.injector(); - - // if we haven't gotten to the point of bootstrapping - // angular, injector$ won't be defined - if (!injector$) { - return; - } - - // destroy the root angular scope - injector$.get('$rootScope').$destroy(); - - // clear the inner html of the root angular element - this.targetDomElement.textContent = ''; - } -} - -const notSupported = (methodName: string) => (...args: any[]) => { - throw new Error(`${methodName} is not supported in the legacy platform.`); -}; diff --git a/src/core/public/mocks.ts b/src/core/public/mocks.ts index 2f7f6fae94436..8ed415c09806c 100644 --- a/src/core/public/mocks.ts +++ b/src/core/public/mocks.ts @@ -42,7 +42,6 @@ export { fatalErrorsServiceMock } from './fatal_errors/fatal_errors_service.mock export { httpServiceMock } from './http/http_service.mock'; export { i18nServiceMock } from './i18n/i18n_service.mock'; export { injectedMetadataServiceMock } from './injected_metadata/injected_metadata_service.mock'; -export { legacyPlatformServiceMock } from './legacy/legacy_service.mock'; export { notificationServiceMock } from './notifications/notifications_service.mock'; export { overlayServiceMock } from './overlays/overlay_service.mock'; export { uiSettingsServiceMock } from './ui_settings/ui_settings_service.mock'; @@ -166,6 +165,7 @@ function createAppMountParametersMock(appBasePath = '') { element: document.createElement('div'), history, onAppLeave: jest.fn(), + setHeaderActionMenu: jest.fn(), }; return params; diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md index 6f25f46c76fb9..c473ea67d9bcd 100644 --- a/src/core/public/public.api.md +++ b/src/core/public/public.api.md @@ -7,6 +7,7 @@ import { Action } from 'history'; import { ApiResponse } from '@elastic/elasticsearch/lib/Transport'; import Boom from 'boom'; +import { ErrorToastOptions as ErrorToastOptions_2 } from 'src/core/public/notifications'; import { EuiBreadcrumb } from '@elastic/eui'; import { EuiButtonEmptyProps } from '@elastic/eui'; import { EuiConfirmModalProps } from '@elastic/eui'; @@ -27,7 +28,9 @@ import { PublicUiSettingsParams as PublicUiSettingsParams_2 } from 'src/core/ser import React from 'react'; import { RecursiveReadonly } from '@kbn/utility-types'; import * as Rx from 'rxjs'; +import { SavedObject as SavedObject_2 } from 'src/core/server'; import { ShallowPromise } from '@kbn/utility-types'; +import { ToastInputFields as ToastInputFields_2 } from 'src/core/public/notifications'; import { TransportRequestOptions } from '@elastic/elasticsearch/lib/Transport'; import { TransportRequestParams } from '@elastic/elasticsearch/lib/Transport'; import { TransportRequestPromise } from '@elastic/elasticsearch/lib/Transport'; @@ -39,25 +42,18 @@ import { UserProvidedValues as UserProvidedValues_2 } from 'src/core/server/type // @internal (undocumented) export function __kbnBootstrap__(): void; -// @public -export interface App extends AppBase { - appRoute?: string; - chromeless?: boolean; - exactRoute?: boolean; - mount: AppMount | AppMountDeprecated; -} - // @public (undocumented) -export interface AppBase { +export interface App { + appRoute?: string; capabilities?: Partial; category?: AppCategory; chromeless?: boolean; defaultPath?: string; euiIconType?: string; + exactRoute?: boolean; icon?: string; id: string; - // @internal - legacy?: boolean; + mount: AppMount | AppMountDeprecated; navLinkStatus?: AppNavLinkStatus; order?: number; status?: AppStatus; @@ -121,7 +117,7 @@ export 
interface ApplicationSetup { // @public (undocumented) export interface ApplicationStart { - applications$: Observable>; + applications$: Observable>; capabilities: RecursiveReadonly; currentAppId$: Observable; getUrlForApp(appId: string, options?: { @@ -165,6 +161,7 @@ export interface AppMountParameters { element: HTMLElement; history: ScopedHistory; onAppLeave: (handler: AppLeaveHandler) => void; + setHeaderActionMenu: (menuMount: MountPoint | undefined) => void; } // @public @@ -185,10 +182,10 @@ export enum AppStatus { export type AppUnmount = () => void; // @public -export type AppUpdatableFields = Pick; +export type AppUpdatableFields = Pick; // @public -export type AppUpdater = (app: AppBase) => Partial | undefined; +export type AppUpdater = (app: App) => Partial | undefined; // @public export function assertNever(x: never): never; @@ -226,10 +223,6 @@ export type ChromeBreadcrumb = EuiBreadcrumb; // @public export interface ChromeDocTitle { - // @internal (undocumented) - __legacy: { - setBaseTitle(baseTitle: string): void; - }; change(newTitle: string | string[]): void; reset(): void; } @@ -289,28 +282,18 @@ export interface ChromeNavControls { // @public (undocumented) export interface ChromeNavLink { - // @deprecated - readonly active?: boolean; readonly baseUrl: string; readonly category?: AppCategory; - // @deprecated readonly disabled?: boolean; - // @deprecated - readonly disableSubUrlTracking?: boolean; readonly euiIconType?: string; readonly hidden?: boolean; - readonly href?: string; + readonly href: string; readonly icon?: string; readonly id: string; - // @internal - readonly legacy: boolean; - // @deprecated - readonly linkToLastSubUrl?: boolean; readonly order?: number; - // @deprecated - readonly subUrlBase?: string; readonly title: string; readonly tooltip?: string; + // Warning: (ae-unresolved-link) The @link reference could not be resolved: The package "kibana" does not have an export "AppBase" readonly url?: string; } @@ -323,12 +306,14 @@ export interface ChromeNavLinks { getNavLinks$(): Observable>>; has(id: string): boolean; showOnly(id: string): void; + // Warning: (ae-unresolved-link) The @link reference could not be resolved: The package "kibana" does not have an export "AppBase" + // // @deprecated update(id: string, values: ChromeNavLinkUpdateableFields): ChromeNavLink | undefined; } // @public (undocumented) -export type ChromeNavLinkUpdateableFields = Partial>; +export type ChromeNavLinkUpdateableFields = Partial>; // @public export interface ChromeRecentlyAccessed { @@ -880,52 +865,6 @@ export interface IUiSettingsClient { set: (key: string, value: any) => Promise; } -// @public (undocumented) -export interface LegacyApp extends AppBase { - // (undocumented) - appUrl: string; - // (undocumented) - disableSubUrlTracking?: boolean; - // (undocumented) - linkToLastSubUrl?: boolean; - // (undocumented) - subUrlBase?: string; -} - -// @public @deprecated -export interface LegacyCoreSetup extends CoreSetup { - // Warning: (ae-forgotten-export) The symbol "InjectedMetadataSetup" needs to be exported by the entry point index.d.ts - // - // @deprecated (undocumented) - injectedMetadata: InjectedMetadataSetup; -} - -// @public @deprecated -export interface LegacyCoreStart extends CoreStart { - // Warning: (ae-forgotten-export) The symbol "InjectedMetadataStart" needs to be exported by the entry point index.d.ts - // - // @deprecated (undocumented) - injectedMetadata: InjectedMetadataStart; -} - -// @public (undocumented) -export interface LegacyNavLink { - // 
(undocumented) - category?: AppCategory; - // (undocumented) - euiIconType?: string; - // (undocumented) - icon?: string; - // (undocumented) - id: string; - // (undocumented) - order: number; - // (undocumented) - title: string; - // (undocumented) - url: string; -} - // @public export function modifyUrl(url: string, urlModifier: (urlParts: URLMeaningfulParts) => Partial | void): string; @@ -1039,19 +978,11 @@ export type PluginOpaqueId = symbol; // @public export type PublicAppInfo = Omit & { - legacy: false; status: AppStatus; navLinkStatus: AppNavLinkStatus; appRoute: string; }; -// @public -export type PublicLegacyAppInfo = Omit & { - legacy: true; - status: AppStatus; - navLinkStatus: AppNavLinkStatus; -}; - // @public export type PublicUiSettingsParams = Omit; @@ -1188,8 +1119,10 @@ export interface SavedObjectsFindOptions { // (undocumented) defaultSearchOperator?: 'AND' | 'OR'; fields?: string[]; + // Warning: (ae-forgotten-export) The symbol "KueryNode" needs to be exported by the entry point index.d.ts + // // (undocumented) - filter?: string; + filter?: string | KueryNode; // (undocumented) hasReference?: { type: string; @@ -1542,6 +1475,6 @@ export interface UserProvidedValues { // Warnings were encountered during analysis: // -// src/core/public/core_system.ts:215:21 - (ae-forgotten-export) The symbol "InternalApplicationStart" needs to be exported by the entry point index.d.ts +// src/core/public/core_system.ts:185:21 - (ae-forgotten-export) The symbol "InternalApplicationStart" needs to be exported by the entry point index.d.ts ``` diff --git a/src/core/public/rendering/index.ts b/src/core/public/rendering/index.ts index 7c1ea7031b763..1de82a50a36b5 100644 --- a/src/core/public/rendering/index.ts +++ b/src/core/public/rendering/index.ts @@ -17,4 +17,4 @@ * under the License. */ -export { RenderingService, RenderingStart } from './rendering_service'; +export { RenderingService } from './rendering_service'; diff --git a/src/core/public/rendering/rendering_service.mock.ts b/src/core/public/rendering/rendering_service.mock.ts index bb4e7cb49f150..bb4723e69ab5e 100644 --- a/src/core/public/rendering/rendering_service.mock.ts +++ b/src/core/public/rendering/rendering_service.mock.ts @@ -17,25 +17,16 @@ * under the License. 
*/ -import { RenderingStart, RenderingService } from './rendering_service'; - -const createStartContractMock = () => { - const setupContract: jest.Mocked = { - legacyTargetDomElement: document.createElement('div'), - }; - return setupContract; -}; +import { RenderingService } from './rendering_service'; type RenderingServiceContract = PublicMethodsOf; const createMock = () => { const mocked: jest.Mocked = { start: jest.fn(), }; - mocked.start.mockReturnValue(createStartContractMock()); return mocked; }; export const renderingServiceMock = { create: createMock, - createStartContract: createStartContractMock, }; diff --git a/src/core/public/rendering/rendering_service.test.tsx b/src/core/public/rendering/rendering_service.test.tsx index 437a602a3d447..37658cb51c46f 100644 --- a/src/core/public/rendering/rendering_service.test.tsx +++ b/src/core/public/rendering/rendering_service.test.tsx @@ -23,7 +23,6 @@ import { act } from 'react-dom/test-utils'; import { RenderingService } from './rendering_service'; import { applicationServiceMock } from '../application/application_service.mock'; import { chromeServiceMock } from '../chrome/chrome_service.mock'; -import { injectedMetadataServiceMock } from '../injected_metadata/injected_metadata_service.mock'; import { overlayServiceMock } from '../overlays/overlay_service.mock'; import { BehaviorSubject } from 'rxjs'; @@ -31,7 +30,6 @@ describe('RenderingService#start', () => { let application: ReturnType; let chrome: ReturnType; let overlays: ReturnType; - let injectedMetadata: ReturnType; let targetDomElement: HTMLDivElement; let rendering: RenderingService; @@ -45,8 +43,6 @@ describe('RenderingService#start', () => { overlays = overlayServiceMock.createStartContract(); overlays.banners.getComponent.mockReturnValue(
<div>I'm a banner!</div>
); - injectedMetadata = injectedMetadataServiceMock.createStartContract(); - targetDomElement = document.createElement('div'); rendering = new RenderingService(); @@ -56,20 +52,14 @@ describe('RenderingService#start', () => { return rendering.start({ application, chrome, - injectedMetadata, overlays, targetDomElement, }); }; - describe('standard mode', () => { - beforeEach(() => { - injectedMetadata.getLegacyMode.mockReturnValue(false); - }); - - it('renders application service into provided DOM element', () => { - startService(); - expect(targetDomElement.querySelector('div.application')).toMatchInlineSnapshot(` + it('renders application service into provided DOM element', () => { + startService(); + expect(targetDomElement.querySelector('div.application')).toMatchInlineSnapshot(`
@@ -78,50 +68,50 @@ describe('RenderingService#start', () => {
`); - }); + }); - it('adds the `chrome-hidden` class to the AppWrapper when chrome is hidden', () => { - const isVisible$ = new BehaviorSubject(true); - chrome.getIsVisible$.mockReturnValue(isVisible$); - startService(); + it('adds the `chrome-hidden` class to the AppWrapper when chrome is hidden', () => { + const isVisible$ = new BehaviorSubject(true); + chrome.getIsVisible$.mockReturnValue(isVisible$); + startService(); - const appWrapper = targetDomElement.querySelector('div.app-wrapper')!; - expect(appWrapper.className).toEqual('app-wrapper'); + const appWrapper = targetDomElement.querySelector('div.app-wrapper')!; + expect(appWrapper.className).toEqual('app-wrapper'); - act(() => isVisible$.next(false)); - expect(appWrapper.className).toEqual('app-wrapper hidden-chrome'); + act(() => isVisible$.next(false)); + expect(appWrapper.className).toEqual('app-wrapper hidden-chrome'); - act(() => isVisible$.next(true)); - expect(appWrapper.className).toEqual('app-wrapper'); - }); + act(() => isVisible$.next(true)); + expect(appWrapper.className).toEqual('app-wrapper'); + }); - it('adds the application classes to the AppContainer', () => { - const applicationClasses$ = new BehaviorSubject([]); - chrome.getApplicationClasses$.mockReturnValue(applicationClasses$); - startService(); + it('adds the application classes to the AppContainer', () => { + const applicationClasses$ = new BehaviorSubject([]); + chrome.getApplicationClasses$.mockReturnValue(applicationClasses$); + startService(); - const appContainer = targetDomElement.querySelector('div.application')!; - expect(appContainer.className).toEqual('application'); + const appContainer = targetDomElement.querySelector('div.application')!; + expect(appContainer.className).toEqual('application'); - act(() => applicationClasses$.next(['classA', 'classB'])); - expect(appContainer.className).toEqual('application classA classB'); + act(() => applicationClasses$.next(['classA', 'classB'])); + expect(appContainer.className).toEqual('application classA classB'); - act(() => applicationClasses$.next(['classC'])); - expect(appContainer.className).toEqual('application classC'); + act(() => applicationClasses$.next(['classC'])); + expect(appContainer.className).toEqual('application classC'); - act(() => applicationClasses$.next([])); - expect(appContainer.className).toEqual('application'); - }); + act(() => applicationClasses$.next([])); + expect(appContainer.className).toEqual('application'); + }); - it('contains wrapper divs', () => { - startService(); - expect(targetDomElement.querySelector('div.app-wrapper')).toBeDefined(); - expect(targetDomElement.querySelector('div.app-wrapper-pannel')).toBeDefined(); - }); + it('contains wrapper divs', () => { + startService(); + expect(targetDomElement.querySelector('div.app-wrapper')).toBeDefined(); + expect(targetDomElement.querySelector('div.app-wrapper-pannel')).toBeDefined(); + }); - it('renders the banner UI', () => { - startService(); - expect(targetDomElement.querySelector('#globalBannerList')).toMatchInlineSnapshot(` + it('renders the banner UI', () => { + startService(); + expect(targetDomElement.querySelector('#globalBannerList')).toMatchInlineSnapshot(`
@@ -130,36 +120,5 @@ describe('RenderingService#start', () => {
`); - }); - }); - - describe('legacy mode', () => { - beforeEach(() => { - injectedMetadata.getLegacyMode.mockReturnValue(true); - }); - - it('renders into provided DOM element', () => { - startService(); - - expect(targetDomElement).toMatchInlineSnapshot(` -
-
-
- Hello chrome! -
-
-
-
- `); - }); - - it('returns a div for the legacy service to render into', () => { - const { legacyTargetDomElement } = startService(); - - expect(targetDomElement.contains(legacyTargetDomElement!)).toBe(true); - }); }); }); diff --git a/src/core/public/rendering/rendering_service.tsx b/src/core/public/rendering/rendering_service.tsx index 58b8c1921e333..a20e14dbf61c5 100644 --- a/src/core/public/rendering/rendering_service.tsx +++ b/src/core/public/rendering/rendering_service.tsx @@ -23,14 +23,12 @@ import { I18nProvider } from '@kbn/i18n/react'; import { InternalChromeStart } from '../chrome'; import { InternalApplicationStart } from '../application'; -import { InjectedMetadataStart } from '../injected_metadata'; import { OverlayStart } from '../overlays'; import { AppWrapper, AppContainer } from './app_containers'; interface StartDeps { application: InternalApplicationStart; chrome: InternalChromeStart; - injectedMetadata: InjectedMetadataStart; overlays: OverlayStart; targetDomElement: HTMLDivElement; } @@ -41,53 +39,28 @@ interface StartDeps { * @internalRemarks Currently this only renders Chrome UI. Notifications and * Overlays UI should be moved here as well. * - * @returns a DOM element for the legacy platform to render into. - * * @internal */ export class RenderingService { - start({ - application, - chrome, - injectedMetadata, - overlays, - targetDomElement, - }: StartDeps): RenderingStart { + start({ application, chrome, overlays, targetDomElement }: StartDeps) { const chromeUi = chrome.getHeaderComponent(); const appUi = application.getComponent(); const bannerUi = overlays.banners.getComponent(); - const legacyMode = injectedMetadata.getLegacyMode(); - const legacyRef = legacyMode ? React.createRef() : null; - ReactDOM.render(
{chromeUi} - {!legacyMode && ( - -
-
{bannerUi}
- {appUi} -
-
- )} - - {legacyMode &&
} + +
+
{bannerUi}
+ {appUi} +
+
, targetDomElement ); - - return { - // When in legacy mode, return legacy div, otherwise undefined. - legacyTargetDomElement: legacyRef ? legacyRef.current! : undefined, - }; } } - -/** @internal */ -export interface RenderingStart { - legacyTargetDomElement?: HTMLDivElement; -} diff --git a/src/core/public/styles/_base.scss b/src/core/public/styles/_base.scss index 9b06b526fc7dd..427c6b7735435 100644 --- a/src/core/public/styles/_base.scss +++ b/src/core/public/styles/_base.scss @@ -1,4 +1,10 @@ +// Charts themes available app-wide +@import '@elastic/charts/dist/theme'; +@import '@elastic/eui/src/themes/charts/theme'; + +// Grab some nav-specific EUI vars @import '@elastic/eui/src/components/collapsible_nav/variables'; + // Application Layout // chrome-context diff --git a/src/core/public/ui_settings/ui_settings_api.test.ts b/src/core/public/ui_settings/ui_settings_api.test.ts index 14791407d2550..b15754e5f1383 100644 --- a/src/core/public/ui_settings/ui_settings_api.test.ts +++ b/src/core/public/ui_settings/ui_settings_api.test.ts @@ -22,7 +22,7 @@ import fetchMock from 'fetch-mock/es5/client'; import * as Rx from 'rxjs'; import { takeUntil, toArray } from 'rxjs/operators'; -import { setup as httpSetup } from '../../../test_utils/public/http_test_setup'; +import { setup as httpSetup } from '../../test_helpers/http_test_setup'; import { UiSettingsApi } from './ui_settings_api'; function setup() { diff --git a/src/core/server/config/deprecation/core_deprecations.ts b/src/core/server/config/deprecation/core_deprecations.ts index e4e881ab24372..2b8b8e383da24 100644 --- a/src/core/server/config/deprecation/core_deprecations.ts +++ b/src/core/server/config/deprecation/core_deprecations.ts @@ -113,7 +113,7 @@ const mapManifestServiceUrlDeprecation: ConfigDeprecation = (settings, fromPath, return settings; }; -export const coreDeprecationProvider: ConfigDeprecationProvider = ({ unusedFromRoot }) => [ +export const coreDeprecationProvider: ConfigDeprecationProvider = ({ rename, unusedFromRoot }) => [ unusedFromRoot('savedObjects.indexCheckTimeout'), unusedFromRoot('server.xsrf.token'), unusedFromRoot('maps.manifestServiceUrl'), @@ -136,6 +136,8 @@ export const coreDeprecationProvider: ConfigDeprecationProvider = ({ unusedFromR unusedFromRoot('optimize.workers'), unusedFromRoot('optimize.profile'), unusedFromRoot('optimize.validateSyntaxOfNodeModules'), + rename('cpu.cgroup.path.override', 'ops.cGroupOverrides.cpuPath'), + rename('cpuacct.cgroup.path.override', 'ops.cGroupOverrides.cpuAcctPath'), configPathDeprecation, dataPathDeprecation, rewriteBasePathDeprecation, diff --git a/src/core/server/config/integration_tests/config_deprecation.test.ts b/src/core/server/config/integration_tests/config_deprecation.test.ts index 65f5bbdac5248..3ebf6d507a2fd 100644 --- a/src/core/server/config/integration_tests/config_deprecation.test.ts +++ b/src/core/server/config/integration_tests/config_deprecation.test.ts @@ -19,7 +19,7 @@ import { mockLoggingSystem } from './config_deprecation.test.mocks'; import { loggingSystemMock } from '../../logging/logging_system.mock'; -import * as kbnTestServer from '../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../test_helpers/kbn_server'; describe('configuration deprecations', () => { let root: ReturnType; diff --git a/src/core/server/core_app/integration_tests/default_route_provider_config.test.ts b/src/core/server/core_app/integration_tests/default_route_provider_config.test.ts index 3284be5ba4750..340f45a0a2c18 100644 --- 
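The two `rename(...)` entries added to `coreDeprecationProvider` above move the old cgroup override settings under `ops.cGroupOverrides`. As a rough sketch of the semantics only (this is not Kibana's actual `ConfigDeprecation` machinery), a rename deprecation reads the old key, writes the value to the new key, removes the old one, and reports a deprecation message:

```ts
type Settings = Record<string, any>;

const getIn = (obj: Settings, path: string[]): any =>
  path.reduce<any>((acc, key) => (acc == null ? undefined : acc[key]), obj);

const setIn = (obj: Settings, path: string[], value: unknown) => {
  const last = path[path.length - 1];
  const parent = path.slice(0, -1).reduce((acc, key) => (acc[key] = acc[key] ?? {}), obj);
  parent[last] = value;
};

const unsetIn = (obj: Settings, path: string[]) => {
  const parent = getIn(obj, path.slice(0, -1));
  if (parent != null) delete parent[path[path.length - 1]];
};

// Sketch of a `rename` deprecation handler: not the real factory, just its observable effect.
const rename = (oldKey: string, newKey: string) => (
  settings: Settings,
  log: (msg: string) => void
): Settings => {
  const value = getIn(settings, oldKey.split('.'));
  if (value !== undefined) {
    setIn(settings, newKey.split('.'), value);
    unsetIn(settings, oldKey.split('.'));
    log(`"${oldKey}" is deprecated and has been replaced by "${newKey}"`);
  }
  return settings;
};

// e.g. a kibana.yml that still sets the old key:
const settings: Settings = { cpu: { cgroup: { path: { override: '/kibana-cpu' } } } };
rename('cpu.cgroup.path.override', 'ops.cGroupOverrides.cpuPath')(settings, console.warn);
// -> settings.ops.cGroupOverrides.cpuPath === '/kibana-cpu', plus a deprecation warning.
```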
a/src/core/server/core_app/integration_tests/default_route_provider_config.test.ts +++ b/src/core/server/core_app/integration_tests/default_route_provider_config.test.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import * as kbnTestServer from '../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../test_helpers/kbn_server'; import { Root } from '../../root'; const { startES } = kbnTestServer.createTestServers({ diff --git a/src/core/server/core_app/integration_tests/static_assets.test.ts b/src/core/server/core_app/integration_tests/static_assets.test.ts index 160ef064a14d9..ca03c4228221f 100644 --- a/src/core/server/core_app/integration_tests/static_assets.test.ts +++ b/src/core/server/core_app/integration_tests/static_assets.test.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import * as kbnTestServer from '../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../test_helpers/kbn_server'; import { Root } from '../../root'; describe('Platform assets', function () { diff --git a/src/core/server/http/integration_tests/core_services.test.ts b/src/core/server/http/integration_tests/core_services.test.ts index 2b9193a280aec..f30ff66ed803a 100644 --- a/src/core/server/http/integration_tests/core_services.test.ts +++ b/src/core/server/http/integration_tests/core_services.test.ts @@ -30,7 +30,7 @@ import { LegacyElasticsearchErrorHelpers } from '../../elasticsearch/legacy'; import { elasticsearchClientMock } from '../../elasticsearch/client/mocks'; import { ResponseError } from '@elastic/elasticsearch/lib/errors'; -import * as kbnTestServer from '../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../test_helpers/kbn_server'; import { InternalElasticsearchServiceStart } from '../../elasticsearch'; interface User { diff --git a/src/core/server/http_resources/integration_tests/http_resources_service.test.ts b/src/core/server/http_resources/integration_tests/http_resources_service.test.ts index eee7dc2786076..624cdbb7f9655 100644 --- a/src/core/server/http_resources/integration_tests/http_resources_service.test.ts +++ b/src/core/server/http_resources/integration_tests/http_resources_service.test.ts @@ -17,7 +17,7 @@ * under the License. 
*/ import { schema } from '@kbn/config-schema'; -import * as kbnTestServer from '../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../test_helpers/kbn_server'; describe('http resources service', () => { describe('register', () => { diff --git a/src/core/server/index.ts b/src/core/server/index.ts index 5422cbc2180ef..97aca74bfd48f 100644 --- a/src/core/server/index.ts +++ b/src/core/server/index.ts @@ -39,6 +39,7 @@ * @packageDocumentation */ +import { Type } from '@kbn/config-schema'; import { ElasticsearchServiceSetup, ILegacyScopedClusterClient, @@ -46,7 +47,6 @@ import { ElasticsearchServiceStart, IScopedClusterClient, } from './elasticsearch'; - import { HttpServiceSetup, HttpServiceStart } from './http'; import { HttpResources } from './http_resources'; @@ -63,12 +63,7 @@ import { CapabilitiesSetup, CapabilitiesStart } from './capabilities'; import { MetricsServiceStart } from './metrics'; import { StatusServiceSetup } from './status'; import { Auditor, AuditTrailSetup, AuditTrailStart } from './audit_trail'; -import { - LoggingServiceSetup, - appendersSchema, - loggerContextConfigSchema, - loggerSchema, -} from './logging'; +import { AppenderConfigType, appendersSchema, LoggingServiceSetup } from './logging'; export { AuditableEvent, Auditor, AuditorFactory, AuditTrailSetup } from './audit_trail'; export { bootstrap } from './bootstrap'; @@ -271,9 +266,7 @@ export { SavedObjectUnsanitizedDoc, SavedObjectsRepositoryFactory, SavedObjectsResolveImportErrorsOptions, - SavedObjectsSchema, SavedObjectsSerializer, - SavedObjectsLegacyService, SavedObjectsUpdateOptions, SavedObjectsUpdateResponse, SavedObjectsAddToNamespacesOptions, @@ -497,8 +490,6 @@ export const config = { schema: elasticsearchConfigSchema, }, logging: { - appenders: appendersSchema, - loggers: loggerSchema, - loggerContext: loggerContextConfigSchema, + appenders: appendersSchema as Type, }, }; diff --git a/src/core/server/legacy/config/get_unused_config_keys.test.ts b/src/core/server/legacy/config/get_unused_config_keys.test.ts index 2106a0748d814..f8506b5744030 100644 --- a/src/core/server/legacy/config/get_unused_config_keys.test.ts +++ b/src/core/server/legacy/config/get_unused_config_keys.test.ts @@ -217,34 +217,4 @@ describe('getUnusedConfigKeys', () => { }) ).toEqual([]); }); - - describe('using deprecation', () => { - it('should use the plugin deprecations provider', async () => { - expect( - await getUnusedConfigKeys({ - coreHandledConfigPaths: [], - pluginSpecs: [ - ({ - getDeprecationsProvider() { - return async ({ rename }: any) => [rename('foo1', 'foo2')]; - }, - getConfigPrefix: () => 'foo', - } as unknown) as LegacyPluginSpec, - ], - disabledPluginSpecs: [], - settings: { - foo: { - foo: 'dolly', - foo1: 'bar', - }, - }, - legacyConfig: getConfig({ - foo: { - foo2: 'bar', - }, - }), - }) - ).toEqual(['foo.foo']); - }); - }); }); diff --git a/src/core/server/legacy/config/get_unused_config_keys.ts b/src/core/server/legacy/config/get_unused_config_keys.ts index 354bf9af042cf..d10c574f04974 100644 --- a/src/core/server/legacy/config/get_unused_config_keys.ts +++ b/src/core/server/legacy/config/get_unused_config_keys.ts @@ -17,10 +17,7 @@ * under the License. 
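One consequence of the `src/core/server/index.ts` change above: the exposed `logging.appenders` config entry is now the runtime `appendersSchema` cast to `Type<AppenderConfigType>`, so consumers see the hand-written union rather than the schema's inferred `TypeOf<...>`. A rough standalone sketch of that pattern with `@kbn/config-schema` (the union here is deliberately trimmed down for illustration):

```ts
import { schema, Type } from '@kbn/config-schema';

// Trimmed-down stand-ins for the appender config interfaces introduced in this change.
interface ConsoleAppenderConfig {
  kind: 'console';
}
interface FileAppenderConfig {
  kind: 'file';
  path: string;
}
type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig;

// Runtime validation stays with the schema, while the public type is the explicit union.
const appendersSchema = schema.oneOf([
  schema.object({ kind: schema.literal('console') }),
  schema.object({ kind: schema.literal('file'), path: schema.string() }),
]) as Type<AppenderConfigType>;

const validated: AppenderConfigType = appendersSchema.validate({
  kind: 'file',
  path: '/tmp/kibana.log',
});
```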
*/ -import { set } from '@elastic/safer-lodash-set'; -import { difference, get } from 'lodash'; -// @ts-expect-error -import { getTransform } from '../../../../legacy/deprecation/index'; +import { difference } from 'lodash'; import { unset } from '../../../../legacy/utils'; import { getFlattenedObject } from '../../../utils'; import { hasConfigPathIntersection } from '../../config'; @@ -41,21 +38,6 @@ export async function getUnusedConfigKeys({ settings: LegacyVars; legacyConfig: LegacyConfig; }) { - // transform deprecated plugin settings - for (let i = 0; i < pluginSpecs.length; i++) { - const spec = pluginSpecs[i]; - const transform = await getTransform(spec); - const prefix = spec.getConfigPrefix(); - - // nested plugin prefixes (a.b) translate to nested objects - const pluginSettings = get(settings, prefix); - if (pluginSettings) { - // flattened settings are expected to be converted to nested objects - // a.b = true => { a: { b: true }} - set(settings, prefix, transform(pluginSettings)); - } - } - // remove config values from disabled plugins for (const spec of disabledPluginSpecs) { unset(settings, spec.getConfigPrefix()); diff --git a/src/core/server/legacy/config/index.ts b/src/core/server/legacy/config/index.ts index f10e3f22d53c5..b56b83ca324cb 100644 --- a/src/core/server/legacy/config/index.ts +++ b/src/core/server/legacy/config/index.ts @@ -19,4 +19,3 @@ export { ensureValidConfiguration } from './ensure_valid_configuration'; export { LegacyObjectToConfigAdapter } from './legacy_object_to_config_adapter'; -export { convertLegacyDeprecationProvider } from './legacy_deprecation_adapters'; diff --git a/src/core/server/legacy/config/legacy_deprecation_adapters.test.ts b/src/core/server/legacy/config/legacy_deprecation_adapters.test.ts deleted file mode 100644 index b09f9d00b3bed..0000000000000 --- a/src/core/server/legacy/config/legacy_deprecation_adapters.test.ts +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { ConfigDeprecation } from '../../config'; -import { configDeprecationFactory } from '../../config/deprecation/deprecation_factory'; -import { applyDeprecations } from '../../config/deprecation/apply_deprecations'; -import { LegacyConfigDeprecationProvider } from '../types'; -import { convertLegacyDeprecationProvider } from './legacy_deprecation_adapters'; - -jest.spyOn(configDeprecationFactory, 'unusedFromRoot'); -jest.spyOn(configDeprecationFactory, 'renameFromRoot'); - -const executeHandlers = (handlers: ConfigDeprecation[]) => { - handlers.forEach((handler) => { - handler({}, '', () => null); - }); -}; - -describe('convertLegacyDeprecationProvider', () => { - beforeEach(() => { - jest.clearAllMocks(); - }); - - it('returns the same number of handlers', async () => { - const legacyProvider: LegacyConfigDeprecationProvider = ({ rename, unused }) => [ - rename('a', 'b'), - unused('c'), - unused('d'), - ]; - - const migrated = await convertLegacyDeprecationProvider(legacyProvider); - const handlers = migrated(configDeprecationFactory); - expect(handlers).toHaveLength(3); - }); - - it('invokes the factory "unusedFromRoot" when using legacy "unused"', async () => { - const legacyProvider: LegacyConfigDeprecationProvider = ({ rename, unused }) => [ - rename('a', 'b'), - unused('c'), - unused('d'), - ]; - - const migrated = await convertLegacyDeprecationProvider(legacyProvider); - const handlers = migrated(configDeprecationFactory); - executeHandlers(handlers); - - expect(configDeprecationFactory.unusedFromRoot).toHaveBeenCalledTimes(2); - expect(configDeprecationFactory.unusedFromRoot).toHaveBeenCalledWith('c'); - expect(configDeprecationFactory.unusedFromRoot).toHaveBeenCalledWith('d'); - }); - - it('invokes the factory "renameFromRoot" when using legacy "rename"', async () => { - const legacyProvider: LegacyConfigDeprecationProvider = ({ rename, unused }) => [ - rename('a', 'b'), - unused('c'), - rename('d', 'e'), - ]; - - const migrated = await convertLegacyDeprecationProvider(legacyProvider); - const handlers = migrated(configDeprecationFactory); - executeHandlers(handlers); - - expect(configDeprecationFactory.renameFromRoot).toHaveBeenCalledTimes(2); - expect(configDeprecationFactory.renameFromRoot).toHaveBeenCalledWith('a', 'b'); - expect(configDeprecationFactory.renameFromRoot).toHaveBeenCalledWith('d', 'e'); - }); - - it('properly works in a real use case', async () => { - const legacyProvider: LegacyConfigDeprecationProvider = ({ rename, unused }) => [ - rename('old', 'new'), - unused('unused'), - unused('notpresent'), - ]; - - const convertedProvider = await convertLegacyDeprecationProvider(legacyProvider); - const handlers = convertedProvider(configDeprecationFactory); - - const rawConfig = { - old: 'oldvalue', - unused: 'unused', - goodValue: 'good', - }; - - const migrated = applyDeprecations( - rawConfig, - handlers.map((handler) => ({ deprecation: handler, path: '' })) - ); - expect(migrated).toEqual({ new: 'oldvalue', goodValue: 'good' }); - }); -}); diff --git a/src/core/server/legacy/config/legacy_deprecation_adapters.ts b/src/core/server/legacy/config/legacy_deprecation_adapters.ts deleted file mode 100644 index 1e0733969e662..0000000000000 --- a/src/core/server/legacy/config/legacy_deprecation_adapters.ts +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { ConfigDeprecation, ConfigDeprecationProvider } from '../../config/deprecation'; -import { configDeprecationFactory } from '../../config/deprecation/deprecation_factory'; -import { LegacyConfigDeprecation, LegacyConfigDeprecationProvider } from '../types'; - -const convertLegacyDeprecation = ( - legacyDeprecation: LegacyConfigDeprecation -): ConfigDeprecation => (config, fromPath, logger) => { - legacyDeprecation(config, logger); - return config; -}; - -const legacyUnused = (unusedKey: string): LegacyConfigDeprecation => (settings, log) => { - const deprecation = configDeprecationFactory.unusedFromRoot(unusedKey); - deprecation(settings, '', log); -}; - -const legacyRename = (oldKey: string, newKey: string): LegacyConfigDeprecation => ( - settings, - log -) => { - const deprecation = configDeprecationFactory.renameFromRoot(oldKey, newKey); - deprecation(settings, '', log); -}; - -/** - * Async deprecation provider converter for legacy deprecation implementation - * - * @internal - */ -export const convertLegacyDeprecationProvider = async ( - legacyProvider: LegacyConfigDeprecationProvider -): Promise => { - const legacyDeprecations = await legacyProvider({ - rename: legacyRename, - unused: legacyUnused, - }); - return () => legacyDeprecations.map(convertLegacyDeprecation); -}; diff --git a/src/core/server/legacy/integration_tests/legacy_service.test.ts b/src/core/server/legacy/integration_tests/legacy_service.test.ts index 1dc8d53e7c3d6..ca3573e730d3f 100644 --- a/src/core/server/legacy/integration_tests/legacy_service.test.ts +++ b/src/core/server/legacy/integration_tests/legacy_service.test.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import * as kbnTestServer from '../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../test_helpers/kbn_server'; describe('legacy service', () => { describe('http server', () => { diff --git a/src/core/server/legacy/integration_tests/logging.test.ts b/src/core/server/legacy/integration_tests/logging.test.ts index 2581c85debf26..2ebe17ea92978 100644 --- a/src/core/server/legacy/integration_tests/logging.test.ts +++ b/src/core/server/legacy/integration_tests/logging.test.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -import * as kbnTestServer from '../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../test_helpers/kbn_server'; import { getPlatformLogsFromMock, diff --git a/src/core/server/legacy/legacy_service.mock.ts b/src/core/server/legacy/legacy_service.mock.ts index 26ec52185a5d8..c27f5be04d965 100644 --- a/src/core/server/legacy/legacy_service.mock.ts +++ b/src/core/server/legacy/legacy_service.mock.ts @@ -24,13 +24,7 @@ type LegacyServiceMock = jest.Mocked & { legacyId const createDiscoverPluginsMock = (): LegacyServiceDiscoverPlugins => ({ pluginSpecs: [], - uiExports: { - savedObjectSchemas: {}, - savedObjectMappings: [], - savedObjectMigrations: {}, - savedObjectValidations: {}, - savedObjectsManagement: {}, - }, + uiExports: {}, navLinks: [], pluginExtendedConfig: { get: jest.fn(), diff --git a/src/core/server/legacy/legacy_service.test.ts b/src/core/server/legacy/legacy_service.test.ts index 45869fd12d2b4..d0492ea88c5e8 100644 --- a/src/core/server/legacy/legacy_service.test.ts +++ b/src/core/server/legacy/legacy_service.test.ts @@ -19,9 +19,7 @@ jest.mock('../../../legacy/server/kbn_server'); jest.mock('../../../cli/cluster/cluster_manager'); -jest.mock('./config/legacy_deprecation_adapters', () => ({ - convertLegacyDeprecationProvider: (provider: any) => Promise.resolve(provider), -})); + import { findLegacyPluginSpecsMock, logLegacyThirdPartyPluginDeprecationWarningMock, @@ -446,46 +444,8 @@ describe('#discoverPlugins()', () => { expect(findLegacyPluginSpecs).toHaveBeenCalledWith(expect.any(Object), logger, env.packageInfo); }); - it(`register legacy plugin's deprecation providers`, async () => { - findLegacyPluginSpecsMock.mockImplementation( - (settings) => - Promise.resolve({ - pluginSpecs: [ - { - getDeprecationsProvider: () => undefined, - }, - { - getDeprecationsProvider: () => 'providerA', - }, - { - getDeprecationsProvider: () => 'providerB', - }, - ], - pluginExtendedConfig: settings, - disabledPluginSpecs: [], - uiExports: {}, - navLinks: [], - }) as any - ); - - const legacyService = new LegacyService({ - coreId, - env, - logger, - configService: configService as any, - }); - - await legacyService.discoverPlugins(); - expect(configService.addDeprecationProvider).toHaveBeenCalledTimes(2); - expect(configService.addDeprecationProvider).toHaveBeenCalledWith('', 'providerA'); - expect(configService.addDeprecationProvider).toHaveBeenCalledWith('', 'providerB'); - }); - it(`logs deprecations for legacy third party plugins`, async () => { - const pluginSpecs = [ - { getId: () => 'pluginA', getDeprecationsProvider: () => undefined }, - { getId: () => 'pluginB', getDeprecationsProvider: () => undefined }, - ]; + const pluginSpecs = [{ getId: () => 'pluginA' }, { getId: () => 'pluginB' }]; findLegacyPluginSpecsMock.mockImplementation( (settings) => Promise.resolve({ diff --git a/src/core/server/legacy/legacy_service.ts b/src/core/server/legacy/legacy_service.ts index adfdecdd7c976..6e6d5cfc24340 100644 --- a/src/core/server/legacy/legacy_service.ts +++ b/src/core/server/legacy/legacy_service.ts @@ -21,7 +21,7 @@ import { combineLatest, ConnectableObservable, EMPTY, Observable, Subscription } import { first, map, publishReplay, tap } from 'rxjs/operators'; import { CoreService } from '../../types'; -import { Config, ConfigDeprecationProvider } from '../config'; +import { Config } from '../config'; import { CoreContext } from '../core_context'; import { CspConfigType, config as cspConfig } from '../csp'; import { DevConfig, DevConfigType, config as devConfig } 
from '../dev'; @@ -29,7 +29,6 @@ import { BasePathProxyServer, HttpConfig, HttpConfigType, config as httpConfig } import { Logger } from '../logging'; import { PathConfigType } from '../path'; import { findLegacyPluginSpecs, logLegacyThirdPartyPluginDeprecationWarning } from './plugins'; -import { convertLegacyDeprecationProvider } from './config'; import { ILegacyInternals, LegacyServiceSetupDeps, @@ -145,18 +144,6 @@ export class LegacyService implements CoreService { navLinks, }; - const deprecationProviders = await pluginSpecs - .map((spec) => spec.getDeprecationsProvider()) - .reduce(async (providers, current) => { - if (current) { - return [...(await providers), await convertLegacyDeprecationProvider(current)]; - } - return providers; - }, Promise.resolve([] as ConfigDeprecationProvider[])); - deprecationProviders.forEach((provider) => - this.coreContext.configService.addDeprecationProvider('', provider) - ); - this.legacyRawConfig = pluginExtendedConfig; // check for unknown uiExport types @@ -277,6 +264,7 @@ export class LegacyService implements CoreService { getTypeRegistry: startDeps.core.savedObjects.getTypeRegistry, }, metrics: { + collectionInterval: startDeps.core.metrics.collectionInterval, getOpsMetrics$: startDeps.core.metrics.getOpsMetrics$, }, uiSettings: { asScopedToClient: startDeps.core.uiSettings.asScopedToClient }, @@ -323,6 +311,17 @@ export class LegacyService implements CoreService { status: { core$: setupDeps.core.status.core$, overall$: setupDeps.core.status.overall$, + set: () => { + throw new Error(`core.status.set is unsupported in legacy`); + }, + // @ts-expect-error + get dependencies$() { + throw new Error(`core.status.dependencies$ is unsupported in legacy`); + }, + // @ts-expect-error + get derivedStatus$() { + throw new Error(`core.status.derivedStatus$ is unsupported in legacy`); + }, }, uiSettings: { register: setupDeps.core.uiSettings.register, @@ -354,11 +353,9 @@ export class LegacyService implements CoreService { registerStaticDir: setupDeps.core.http.registerStaticDir, }, hapiServer: setupDeps.core.http.server, - kibanaMigrator: startDeps.core.savedObjects.migrator, uiPlugins: setupDeps.uiPlugins, elasticsearch: setupDeps.core.elasticsearch, rendering: setupDeps.core.rendering, - savedObjectsClientProvider: startDeps.core.savedObjects.clientProvider, legacy: this.legacyInternals, }, logger: this.coreContext.logger, diff --git a/src/core/server/legacy/logging/appenders/legacy_appender.ts b/src/core/server/legacy/logging/appenders/legacy_appender.ts index 0c2f4ce93c3b8..a5d36423ba4c6 100644 --- a/src/core/server/legacy/logging/appenders/legacy_appender.ts +++ b/src/core/server/legacy/logging/appenders/legacy_appender.ts @@ -23,6 +23,11 @@ import { LogRecord } from '../../../logging/log_record'; import { LegacyLoggingServer } from '../legacy_logging_server'; import { LegacyVars } from '../../types'; +export interface LegacyAppenderConfig { + kind: 'legacy-appender'; + legacyLoggingConfig?: any; +} + /** * Simple appender that just forwards `LogRecord` to the legacy KbnServer log. 
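The new `LegacyAppenderConfig` interface above gives the `legacy-appender` entry an explicit shape. A minimal example value, assuming nothing about the nested legacy logging settings beyond their being an opaque payload (the keys shown inside `legacyLoggingConfig` are purely illustrative):

```ts
// Mirrors the interface added in legacy_appender.ts above.
interface LegacyAppenderConfig {
  kind: 'legacy-appender';
  legacyLoggingConfig?: any;
}

// The legacy payload is passed through untyped; these keys are illustrative only.
const legacyAppender: LegacyAppenderConfig = {
  kind: 'legacy-appender',
  legacyLoggingConfig: { silent: false, quiet: false },
};

console.log(legacyAppender.kind);
```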
* @internal diff --git a/src/core/server/legacy/plugins/__snapshots__/get_nav_links.test.ts.snap b/src/core/server/legacy/plugins/__snapshots__/get_nav_links.test.ts.snap deleted file mode 100644 index c1b7164908ed6..0000000000000 --- a/src/core/server/legacy/plugins/__snapshots__/get_nav_links.test.ts.snap +++ /dev/null @@ -1,56 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[` 1`] = ` -Array [ - Object { - "category": undefined, - "disableSubUrlTracking": undefined, - "disabled": false, - "euiIconType": undefined, - "hidden": false, - "icon": undefined, - "id": "link-a", - "linkToLastSubUrl": true, - "order": 0, - "subUrlBase": "/some-custom-url", - "title": "AppA", - "tooltip": "", - "url": "/some-custom-url", - }, - Object { - "category": undefined, - "disableSubUrlTracking": true, - "disabled": false, - "euiIconType": undefined, - "hidden": false, - "icon": undefined, - "id": "link-b", - "linkToLastSubUrl": true, - "order": 0, - "subUrlBase": "/url-b", - "title": "AppB", - "tooltip": "", - "url": "/url-b", - }, - Object { - "category": undefined, - "euiIconType": undefined, - "icon": undefined, - "id": "app-a", - "linkToLastSubUrl": true, - "order": 0, - "title": "AppA", - "url": "/app/app-a", - }, - Object { - "category": undefined, - "euiIconType": undefined, - "icon": undefined, - "id": "app-b", - "linkToLastSubUrl": true, - "order": 0, - "title": "AppB", - "url": "/app/app-b", - }, -] -`; diff --git a/src/core/server/legacy/plugins/find_legacy_plugin_specs.ts b/src/core/server/legacy/plugins/find_legacy_plugin_specs.ts index f3ec2ed8335c5..82e04496ffc3e 100644 --- a/src/core/server/legacy/plugins/find_legacy_plugin_specs.ts +++ b/src/core/server/legacy/plugins/find_legacy_plugin_specs.ts @@ -31,7 +31,6 @@ import { collectUiExports as collectLegacyUiExports } from '../../../../legacy/u import { LoggerFactory } from '../../logging'; import { PackageInfo } from '../../config'; import { LegacyPluginSpec, LegacyPluginPack, LegacyConfig } from '../types'; -import { getNavLinks } from './get_nav_links'; export async function findLegacyPluginSpecs( settings: unknown, @@ -125,13 +124,12 @@ export async function findLegacyPluginSpecs( log$.pipe(toArray()) ).toPromise(); const uiExports = collectLegacyUiExports(pluginSpecs); - const navLinks = getNavLinks(uiExports, pluginSpecs); return { disabledPluginSpecs, pluginSpecs, pluginExtendedConfig: configToMutate, uiExports, - navLinks, + navLinks: [], }; } diff --git a/src/core/server/legacy/plugins/get_nav_links.test.ts b/src/core/server/legacy/plugins/get_nav_links.test.ts deleted file mode 100644 index af10706d0ea08..0000000000000 --- a/src/core/server/legacy/plugins/get_nav_links.test.ts +++ /dev/null @@ -1,288 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { LegacyUiExports, LegacyPluginSpec, LegacyAppSpec, LegacyNavLinkSpec } from '../types'; -import { getNavLinks } from './get_nav_links'; - -const createLegacyExports = ({ - uiAppSpecs = [], - navLinkSpecs = [], -}: { - uiAppSpecs?: LegacyAppSpec[]; - navLinkSpecs?: LegacyNavLinkSpec[]; -}): LegacyUiExports => ({ - uiAppSpecs, - navLinkSpecs, - injectedVarsReplacers: [], - defaultInjectedVarProviders: [], - savedObjectMappings: [], - savedObjectSchemas: {}, - savedObjectMigrations: {}, - savedObjectValidations: {}, - savedObjectsManagement: {}, -}); - -const createPluginSpecs = (...ids: string[]): LegacyPluginSpec[] => - ids.map( - (id) => - ({ - getId: () => id, - } as LegacyPluginSpec) - ); - -describe('getNavLinks', () => { - describe('generating from uiAppSpecs', () => { - it('generates navlinks from legacy app specs', () => { - const navlinks = getNavLinks( - createLegacyExports({ - uiAppSpecs: [ - { - id: 'app-a', - title: 'AppA', - pluginId: 'pluginA', - }, - { - id: 'app-b', - title: 'AppB', - pluginId: 'pluginA', - }, - ], - }), - createPluginSpecs('pluginA') - ); - - expect(navlinks.length).toEqual(2); - expect(navlinks[0]).toEqual( - expect.objectContaining({ - id: 'app-a', - title: 'AppA', - url: '/app/app-a', - }) - ); - expect(navlinks[1]).toEqual( - expect.objectContaining({ - id: 'app-b', - title: 'AppB', - url: '/app/app-b', - }) - ); - }); - - it('uses the app id to generates the navlink id even if pluginId is specified', () => { - const navlinks = getNavLinks( - createLegacyExports({ - uiAppSpecs: [ - { - id: 'app-a', - title: 'AppA', - pluginId: 'pluginA', - }, - { - id: 'app-b', - title: 'AppB', - pluginId: 'pluginA', - }, - ], - }), - createPluginSpecs('pluginA') - ); - - expect(navlinks.length).toEqual(2); - expect(navlinks[0].id).toEqual('app-a'); - expect(navlinks[1].id).toEqual('app-b'); - }); - - it('throws if an app reference a missing plugin', () => { - expect(() => { - getNavLinks( - createLegacyExports({ - uiAppSpecs: [ - { - id: 'app-a', - title: 'AppA', - pluginId: 'notExistingPlugin', - }, - ], - }), - createPluginSpecs('pluginA') - ); - }).toThrowErrorMatchingInlineSnapshot(`"Unknown plugin id \\"notExistingPlugin\\""`); - }); - - it('uses all known properties of the navlink', () => { - const navlinks = getNavLinks( - createLegacyExports({ - uiAppSpecs: [ - { - id: 'app-a', - title: 'AppA', - category: { - id: 'foo', - label: 'My Category', - }, - order: 42, - url: '/some-custom-url', - icon: 'fa-snowflake', - euiIconType: 'euiIcon', - linkToLastSubUrl: true, - hidden: false, - }, - ], - }), - [] - ); - expect(navlinks.length).toBe(1); - expect(navlinks[0]).toEqual({ - id: 'app-a', - title: 'AppA', - category: { - id: 'foo', - label: 'My Category', - }, - order: 42, - url: '/some-custom-url', - icon: 'fa-snowflake', - euiIconType: 'euiIcon', - linkToLastSubUrl: true, - }); - }); - }); - - describe('generating from navLinkSpecs', () => { - it('generates navlinks from legacy navLink specs', () => { - const navlinks = getNavLinks( - createLegacyExports({ - navLinkSpecs: [ - { - id: 'link-a', - title: 'AppA', - url: '/some-custom-url', - }, - { - id: 'link-b', - title: 'AppB', - url: '/some-other-url', - disableSubUrlTracking: true, - }, - ], - }), - createPluginSpecs('pluginA') - ); - - expect(navlinks.length).toEqual(2); - expect(navlinks[0]).toEqual( - expect.objectContaining({ - id: 'link-a', - title: 'AppA', - url: '/some-custom-url', - hidden: false, - disabled: false, - }) - ); - expect(navlinks[1]).toEqual( - expect.objectContaining({ - 
id: 'link-b', - title: 'AppB', - url: '/some-other-url', - disableSubUrlTracking: true, - }) - ); - }); - - it('only uses known properties to create the navlink', () => { - const navlinks = getNavLinks( - createLegacyExports({ - navLinkSpecs: [ - { - id: 'link-a', - title: 'AppA', - category: { - id: 'foo', - label: 'My Second Cat', - }, - order: 72, - url: '/some-other-custom', - subUrlBase: '/some-other-custom/sub', - disableSubUrlTracking: true, - icon: 'fa-corn', - euiIconType: 'euiIconBis', - linkToLastSubUrl: false, - hidden: false, - tooltip: 'My other tooltip', - }, - ], - }), - [] - ); - expect(navlinks.length).toBe(1); - expect(navlinks[0]).toEqual({ - id: 'link-a', - title: 'AppA', - category: { - id: 'foo', - label: 'My Second Cat', - }, - order: 72, - url: '/some-other-custom', - subUrlBase: '/some-other-custom/sub', - disableSubUrlTracking: true, - icon: 'fa-corn', - euiIconType: 'euiIconBis', - linkToLastSubUrl: false, - hidden: false, - disabled: false, - tooltip: 'My other tooltip', - }); - }); - }); - - describe('generating from both apps and navlinks', () => { - const navlinks = getNavLinks( - createLegacyExports({ - uiAppSpecs: [ - { - id: 'app-a', - title: 'AppA', - }, - { - id: 'app-b', - title: 'AppB', - }, - ], - navLinkSpecs: [ - { - id: 'link-a', - title: 'AppA', - url: '/some-custom-url', - }, - { - id: 'link-b', - title: 'AppB', - url: '/url-b', - disableSubUrlTracking: true, - }, - ], - }), - [] - ); - - expect(navlinks.length).toBe(4); - expect(navlinks).toMatchSnapshot(); - }); -}); diff --git a/src/core/server/legacy/plugins/get_nav_links.ts b/src/core/server/legacy/plugins/get_nav_links.ts deleted file mode 100644 index b1d22df41e345..0000000000000 --- a/src/core/server/legacy/plugins/get_nav_links.ts +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { - LegacyUiExports, - LegacyNavLink, - LegacyPluginSpec, - LegacyNavLinkSpec, - LegacyAppSpec, -} from '../types'; - -function legacyAppToNavLink(spec: LegacyAppSpec): LegacyNavLink { - if (!spec.id) { - throw new Error('Every app must specify an id'); - } - return { - id: spec.id, - category: spec.category, - title: spec.title ?? spec.id, - order: typeof spec.order === 'number' ? spec.order : 0, - icon: spec.icon, - euiIconType: spec.euiIconType, - url: spec.url || `/app/${spec.id}`, - linkToLastSubUrl: spec.linkToLastSubUrl ?? true, - }; -} - -function legacyLinkToNavLink(spec: LegacyNavLinkSpec): LegacyNavLink { - return { - id: spec.id, - category: spec.category, - title: spec.title, - order: typeof spec.order === 'number' ? 
spec.order : 0, - url: spec.url, - subUrlBase: spec.subUrlBase || spec.url, - disableSubUrlTracking: spec.disableSubUrlTracking, - icon: spec.icon, - euiIconType: spec.euiIconType, - linkToLastSubUrl: spec.linkToLastSubUrl ?? true, - hidden: spec.hidden ?? false, - disabled: spec.disabled ?? false, - tooltip: spec.tooltip ?? '', - }; -} - -function isHidden(app: LegacyAppSpec) { - return app.listed === false || app.hidden === true; -} - -export function getNavLinks(uiExports: LegacyUiExports, pluginSpecs: LegacyPluginSpec[]) { - const navLinkSpecs = uiExports.navLinkSpecs || []; - const appSpecs = (uiExports.uiAppSpecs || []).filter( - (app) => app !== undefined && !isHidden(app) - ) as LegacyAppSpec[]; - - const pluginIds = (pluginSpecs || []).map((spec) => spec.getId()); - appSpecs.forEach((spec) => { - if (spec.pluginId && !pluginIds.includes(spec.pluginId)) { - throw new Error(`Unknown plugin id "${spec.pluginId}"`); - } - }); - - return [...navLinkSpecs.map(legacyLinkToNavLink), ...appSpecs.map(legacyAppToNavLink)].sort( - (a, b) => a.order - b.order - ); -} diff --git a/src/core/server/legacy/plugins/log_legacy_plugins_warning.test.ts b/src/core/server/legacy/plugins/log_legacy_plugins_warning.test.ts index dfa2396d5904b..2317f1036ce42 100644 --- a/src/core/server/legacy/plugins/log_legacy_plugins_warning.test.ts +++ b/src/core/server/legacy/plugins/log_legacy_plugins_warning.test.ts @@ -26,7 +26,6 @@ const createPluginSpec = ({ id, path }: { id: string; path: string }): LegacyPlu getId: () => id, getExpectedKibanaVersion: () => 'kibana', getConfigPrefix: () => 'plugin.config', - getDeprecationsProvider: () => undefined, getPack: () => ({ getPath: () => path, }), diff --git a/src/core/server/legacy/types.ts b/src/core/server/legacy/types.ts index 98f8d874c7088..1105308fd44cf 100644 --- a/src/core/server/legacy/types.ts +++ b/src/core/server/legacy/types.ts @@ -24,7 +24,6 @@ import { KibanaRequest, LegacyRequest } from '../http'; import { InternalCoreSetup, InternalCoreStart } from '../internal_types'; import { PluginsServiceSetup, PluginsServiceStart, UiPlugins } from '../plugins'; import { InternalRenderingServiceSetup } from '../rendering'; -import { SavedObjectsLegacyUiExports } from '../types'; /** * @internal @@ -51,36 +50,6 @@ export interface LegacyConfig { set(config: LegacyVars): void; } -/** - * Representation of a legacy configuration deprecation factory used for - * legacy plugin deprecations. - * - * @internal - * @deprecated - */ -export interface LegacyConfigDeprecationFactory { - rename(oldKey: string, newKey: string): LegacyConfigDeprecation; - unused(unusedKey: string): LegacyConfigDeprecation; -} - -/** - * Representation of a legacy configuration deprecation. - * - * @internal - * @deprecated - */ -export type LegacyConfigDeprecation = (settings: LegacyVars, log: (msg: string) => void) => void; - -/** - * Representation of a legacy configuration deprecation provider. 
- * - * @internal - * @deprecated - */ -export type LegacyConfigDeprecationProvider = ( - factory: LegacyConfigDeprecationFactory -) => LegacyConfigDeprecation[] | Promise; - /** * @internal * @deprecated @@ -97,7 +66,6 @@ export interface LegacyPluginSpec { getId: () => unknown; getExpectedKibanaVersion: () => string; getConfigPrefix: () => string; - getDeprecationsProvider: () => LegacyConfigDeprecationProvider | undefined; getPack: () => LegacyPluginPack; } @@ -159,13 +127,13 @@ export type LegacyNavLink = Omit; unknown?: [{ pluginSpec: LegacyPluginSpec; type: unknown }]; -}; +} /** * @public diff --git a/src/core/server/logging/appenders/appenders.ts b/src/core/server/logging/appenders/appenders.ts index 3b90a10a1a76c..edfce4988275a 100644 --- a/src/core/server/logging/appenders/appenders.ts +++ b/src/core/server/logging/appenders/appenders.ts @@ -17,14 +17,17 @@ * under the License. */ -import { schema, TypeOf } from '@kbn/config-schema'; +import { schema } from '@kbn/config-schema'; import { assertNever } from '../../../utils'; -import { LegacyAppender } from '../../legacy/logging/appenders/legacy_appender'; +import { + LegacyAppender, + LegacyAppenderConfig, +} from '../../legacy/logging/appenders/legacy_appender'; import { Layouts } from '../layouts/layouts'; import { LogRecord } from '../log_record'; -import { ConsoleAppender } from './console/console_appender'; -import { FileAppender } from './file/file_appender'; +import { ConsoleAppender, ConsoleAppenderConfig } from './console/console_appender'; +import { FileAppender, FileAppenderConfig } from './file/file_appender'; /** * Config schema for validting the shape of the `appenders` key in in {@link LoggerContextConfigType} or @@ -39,7 +42,7 @@ export const appendersSchema = schema.oneOf([ ]); /** @public */ -export type AppenderConfigType = TypeOf; +export type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig; /** * Entity that can append `LogRecord` instances to file, stdout, memory or whatever diff --git a/src/core/server/logging/appenders/console/console_appender.ts b/src/core/server/logging/appenders/console/console_appender.ts index b4420c12a23ca..a54674b1d347c 100644 --- a/src/core/server/logging/appenders/console/console_appender.ts +++ b/src/core/server/logging/appenders/console/console_appender.ts @@ -19,13 +19,19 @@ import { schema } from '@kbn/config-schema'; -import { Layout, Layouts } from '../../layouts/layouts'; +import { Layout, Layouts, LayoutConfigType } from '../../layouts/layouts'; import { LogRecord } from '../../log_record'; import { DisposableAppender } from '../appenders'; const { literal, object } = schema; +export interface ConsoleAppenderConfig { + kind: 'console'; + layout: LayoutConfigType; +} + /** + * * Appender that formats all the `LogRecord` instances it receives and logs them via built-in `console`. 
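Because `AppenderConfigType` above is now a hand-written discriminated union (console / file / legacy-appender) rather than `TypeOf<typeof appendersSchema>`, consumers can narrow on the `kind` tag. A self-contained sketch; the interfaces are re-declared here only to keep the example standalone, the real ones live in the files shown in this diff:

```ts
type LayoutConfigType = { kind: 'pattern'; pattern?: string; highlight?: boolean } | { kind: 'json' };

interface ConsoleAppenderConfig {
  kind: 'console';
  layout: LayoutConfigType;
}
interface FileAppenderConfig {
  kind: 'file';
  layout: LayoutConfigType;
  path: string;
}
interface LegacyAppenderConfig {
  kind: 'legacy-appender';
  legacyLoggingConfig?: unknown;
}
type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig;

function describeAppender(config: AppenderConfigType): string {
  switch (config.kind) {
    case 'console':
      return `console appender with a ${config.layout.kind} layout`;
    case 'file':
      // `path` only exists after narrowing to the file appender member.
      return `file appender writing to ${config.path}`;
    default:
      // Remaining member of the union: the legacy appender.
      return 'appender forwarding to the legacy logging system';
  }
}

console.log(describeAppender({ kind: 'file', path: '/var/log/kibana.log', layout: { kind: 'json' } }));
```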
* @internal */ diff --git a/src/core/server/logging/appenders/file/file_appender.ts b/src/core/server/logging/appenders/file/file_appender.ts index 728e82ebcec9a..a0e484cd87c8f 100644 --- a/src/core/server/logging/appenders/file/file_appender.ts +++ b/src/core/server/logging/appenders/file/file_appender.ts @@ -20,10 +20,16 @@ import { schema } from '@kbn/config-schema'; import { createWriteStream, WriteStream } from 'fs'; -import { Layout, Layouts } from '../../layouts/layouts'; +import { Layout, Layouts, LayoutConfigType } from '../../layouts/layouts'; import { LogRecord } from '../../log_record'; import { DisposableAppender } from '../appenders'; +export interface FileAppenderConfig { + kind: 'file'; + layout: LayoutConfigType; + path: string; +} + /** * Appender that formats all the `LogRecord` instances it receives and writes them to the specified file. * @internal diff --git a/src/core/server/logging/integration_tests/logging.test.ts b/src/core/server/logging/integration_tests/logging.test.ts index 841c1ce15af47..7f6059567c46e 100644 --- a/src/core/server/logging/integration_tests/logging.test.ts +++ b/src/core/server/logging/integration_tests/logging.test.ts @@ -17,7 +17,7 @@ * under the License. */ -import * as kbnTestServer from '../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../test_helpers/kbn_server'; import { InternalCoreSetup } from '../../internal_types'; import { LoggerContextConfigInput } from '../logging_config'; import { Subject } from 'rxjs'; diff --git a/src/core/server/logging/layouts/json_layout.ts b/src/core/server/logging/layouts/json_layout.ts index 04416184a5957..37eb6b8c4806e 100644 --- a/src/core/server/logging/layouts/json_layout.ts +++ b/src/core/server/logging/layouts/json_layout.ts @@ -19,7 +19,7 @@ import moment from 'moment-timezone'; import { merge } from 'lodash'; -import { schema, TypeOf } from '@kbn/config-schema'; +import { schema } from '@kbn/config-schema'; import { LogRecord } from '../log_record'; import { Layout } from './layouts'; @@ -31,7 +31,9 @@ const jsonLayoutSchema = object({ }); /** @internal */ -export type JsonLayoutConfigType = TypeOf; +export interface JsonLayoutConfigType { + kind: 'json'; +} /** * Layout that just converts `LogRecord` into JSON string. diff --git a/src/core/server/logging/layouts/layouts.ts b/src/core/server/logging/layouts/layouts.ts index 0e6a6360cab2e..124c007bae104 100644 --- a/src/core/server/logging/layouts/layouts.ts +++ b/src/core/server/logging/layouts/layouts.ts @@ -26,7 +26,7 @@ import { PatternLayout, PatternLayoutConfigType } from './pattern_layout'; const { oneOf } = schema; -type LayoutConfigType = PatternLayoutConfigType | JsonLayoutConfigType; +export type LayoutConfigType = PatternLayoutConfigType | JsonLayoutConfigType; /** * Entity that can format `LogRecord` instance into a string. diff --git a/src/core/server/logging/layouts/pattern_layout.ts b/src/core/server/logging/layouts/pattern_layout.ts index 7839345a3703b..5dfc8aca77f18 100644 --- a/src/core/server/logging/layouts/pattern_layout.ts +++ b/src/core/server/logging/layouts/pattern_layout.ts @@ -17,7 +17,7 @@ * under the License. 
*/ -import { schema, TypeOf } from '@kbn/config-schema'; +import { schema } from '@kbn/config-schema'; import { LogRecord } from '../log_record'; import { Layout } from './layouts'; @@ -58,7 +58,11 @@ const conversions: Conversion[] = [ ]; /** @internal */ -export type PatternLayoutConfigType = TypeOf; +export interface PatternLayoutConfigType { + kind: 'pattern'; + highlight?: boolean; + pattern?: string; +} /** * Layout that formats `LogRecord` using the `pattern` string with optional diff --git a/src/core/server/logging/logging_config.ts b/src/core/server/logging/logging_config.ts index a6aafabeb970c..a6ab15dc29fdf 100644 --- a/src/core/server/logging/logging_config.ts +++ b/src/core/server/logging/logging_config.ts @@ -96,7 +96,9 @@ export const config = { }), }; -export type LoggingConfigType = TypeOf; +export type LoggingConfigType = Omit, 'appenders'> & { + appenders: Map; +}; /** * Config schema for validating the inputs to the {@link LoggingServiceStart.configure} API. diff --git a/src/core/server/metrics/collectors/cgroup.test.ts b/src/core/server/metrics/collectors/cgroup.test.ts new file mode 100644 index 0000000000000..39f917b9f0ba1 --- /dev/null +++ b/src/core/server/metrics/collectors/cgroup.test.ts @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
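With `PatternLayoutConfigType` now an explicit interface and `LoggingConfigType['appenders']` redefined as a `Map<string, AppenderConfigType>`, the parsed logging configuration can be modelled roughly as below. This is a standalone sketch: the appender names are made up, and the types are re-declared inline rather than imported from the files in this diff.

```ts
interface PatternLayoutConfigType {
  kind: 'pattern';
  highlight?: boolean;
  pattern?: string;
}
interface JsonLayoutConfigType {
  kind: 'json';
}
type LayoutConfigType = PatternLayoutConfigType | JsonLayoutConfigType;

type AppenderConfigType =
  | { kind: 'console'; layout: LayoutConfigType }
  | { kind: 'file'; layout: LayoutConfigType; path: string };

// Roughly the post-validation shape: named appenders keyed by id, held in a Map.
const appenders = new Map<string, AppenderConfigType>([
  ['console', { kind: 'console', layout: { kind: 'pattern', highlight: true } }],
  ['audit-file', { kind: 'file', path: '/var/log/kibana-audit.log', layout: { kind: 'json' } }],
]);

for (const [name, appender] of appenders) {
  console.log(`${name}: ${appender.kind}`);
}
```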
+ */ + +import mockFs from 'mock-fs'; +import { OsCgroupMetricsCollector } from './cgroup'; + +describe('OsCgroupMetricsCollector', () => { + afterEach(() => mockFs.restore()); + + it('returns empty object when no cgroup file present', async () => { + mockFs({ + '/proc/self': { + /** empty directory */ + }, + }); + + const collector = new OsCgroupMetricsCollector({}); + expect(await collector.collect()).toEqual({}); + }); + + it('collects default cgroup data', async () => { + mockFs({ + '/proc/self/cgroup': ` +123:memory:/groupname +123:cpu:/groupname +123:cpuacct:/groupname + `, + '/sys/fs/cgroup/cpuacct/groupname/cpuacct.usage': '111', + '/sys/fs/cgroup/cpu/groupname/cpu.cfs_period_us': '222', + '/sys/fs/cgroup/cpu/groupname/cpu.cfs_quota_us': '333', + '/sys/fs/cgroup/cpu/groupname/cpu.stat': ` +nr_periods 444 +nr_throttled 555 +throttled_time 666 + `, + }); + + const collector = new OsCgroupMetricsCollector({}); + expect(await collector.collect()).toMatchInlineSnapshot(` + Object { + "cpu": Object { + "cfs_period_micros": 222, + "cfs_quota_micros": 333, + "control_group": "/groupname", + "stat": Object { + "number_of_elapsed_periods": 444, + "number_of_times_throttled": 555, + "time_throttled_nanos": 666, + }, + }, + "cpuacct": Object { + "control_group": "/groupname", + "usage_nanos": 111, + }, + } + `); + }); + + it('collects override cgroup data', async () => { + mockFs({ + '/proc/self/cgroup': ` +123:memory:/groupname +123:cpu:/groupname +123:cpuacct:/groupname + `, + '/sys/fs/cgroup/cpuacct/xxcustomcpuacctxx/cpuacct.usage': '111', + '/sys/fs/cgroup/cpu/xxcustomcpuxx/cpu.cfs_period_us': '222', + '/sys/fs/cgroup/cpu/xxcustomcpuxx/cpu.cfs_quota_us': '333', + '/sys/fs/cgroup/cpu/xxcustomcpuxx/cpu.stat': ` +nr_periods 444 +nr_throttled 555 +throttled_time 666 + `, + }); + + const collector = new OsCgroupMetricsCollector({ + cpuAcctPath: 'xxcustomcpuacctxx', + cpuPath: 'xxcustomcpuxx', + }); + expect(await collector.collect()).toMatchInlineSnapshot(` + Object { + "cpu": Object { + "cfs_period_micros": 222, + "cfs_quota_micros": 333, + "control_group": "xxcustomcpuxx", + "stat": Object { + "number_of_elapsed_periods": 444, + "number_of_times_throttled": 555, + "time_throttled_nanos": 666, + }, + }, + "cpuacct": Object { + "control_group": "xxcustomcpuacctxx", + "usage_nanos": 111, + }, + } + `); + }); +}); diff --git a/src/core/server/metrics/collectors/cgroup.ts b/src/core/server/metrics/collectors/cgroup.ts new file mode 100644 index 0000000000000..867ea44dff1ae --- /dev/null +++ b/src/core/server/metrics/collectors/cgroup.ts @@ -0,0 +1,194 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import fs from 'fs'; +import { join as joinPath } from 'path'; +import { MetricsCollector, OpsOsMetrics } from './types'; + +type OsCgroupMetrics = Pick; + +interface OsCgroupMetricsCollectorOptions { + cpuPath?: string; + cpuAcctPath?: string; +} + +export class OsCgroupMetricsCollector implements MetricsCollector { + /** Used to prevent unnecessary file reads on systems not using cgroups. */ + private noCgroupPresent = false; + private cpuPath?: string; + private cpuAcctPath?: string; + + constructor(private readonly options: OsCgroupMetricsCollectorOptions) {} + + public async collect(): Promise { + try { + await this.initializePaths(); + if (this.noCgroupPresent || !this.cpuAcctPath || !this.cpuPath) { + return {}; + } + + const [cpuAcctUsage, cpuFsPeriod, cpuFsQuota, cpuStat] = await Promise.all([ + readCPUAcctUsage(this.cpuAcctPath), + readCPUFsPeriod(this.cpuPath), + readCPUFsQuota(this.cpuPath), + readCPUStat(this.cpuPath), + ]); + + return { + cpuacct: { + control_group: this.cpuAcctPath, + usage_nanos: cpuAcctUsage, + }, + + cpu: { + control_group: this.cpuPath, + cfs_period_micros: cpuFsPeriod, + cfs_quota_micros: cpuFsQuota, + stat: cpuStat, + }, + }; + } catch (err) { + if (err.code === 'ENOENT') { + this.noCgroupPresent = true; + return {}; + } else { + throw err; + } + } + } + + public reset() {} + + private async initializePaths() { + // Perform this setup lazily on the first collect call and then memoize the results. + // Makes the assumption this data doesn't change while the process is running. + if (this.cpuPath && this.cpuAcctPath) { + return; + } + + // Only read the file if both options are undefined. + if (!this.options.cpuPath || !this.options.cpuAcctPath) { + const cgroups = await readControlGroups(); + this.cpuPath = this.options.cpuPath || cgroups[GROUP_CPU]; + this.cpuAcctPath = this.options.cpuAcctPath || cgroups[GROUP_CPUACCT]; + } else { + this.cpuPath = this.options.cpuPath; + this.cpuAcctPath = this.options.cpuAcctPath; + } + + // prevents undefined cgroup paths + if (!this.cpuPath || !this.cpuAcctPath) { + this.noCgroupPresent = true; + } + } +} + +const CONTROL_GROUP_RE = new RegExp('\\d+:([^:]+):(/.*)'); +const CONTROLLER_SEPARATOR_RE = ','; + +const PROC_SELF_CGROUP_FILE = '/proc/self/cgroup'; +const PROC_CGROUP_CPU_DIR = '/sys/fs/cgroup/cpu'; +const PROC_CGROUP_CPUACCT_DIR = '/sys/fs/cgroup/cpuacct'; + +const GROUP_CPUACCT = 'cpuacct'; +const CPUACCT_USAGE_FILE = 'cpuacct.usage'; + +const GROUP_CPU = 'cpu'; +const CPU_FS_PERIOD_US_FILE = 'cpu.cfs_period_us'; +const CPU_FS_QUOTA_US_FILE = 'cpu.cfs_quota_us'; +const CPU_STATS_FILE = 'cpu.stat'; + +async function readControlGroups() { + const data = await fs.promises.readFile(PROC_SELF_CGROUP_FILE); + + return data + .toString() + .split(/\n/) + .reduce((acc, line) => { + const matches = line.match(CONTROL_GROUP_RE); + + if (matches !== null) { + const controllers = matches[1].split(CONTROLLER_SEPARATOR_RE); + controllers.forEach((controller) => { + acc[controller] = matches[2]; + }); + } + + return acc; + }, {} as Record); +} + +async function fileContentsToInteger(path: string) { + const data = await fs.promises.readFile(path); + return parseInt(data.toString(), 10); +} + +function readCPUAcctUsage(controlGroup: string) { + return fileContentsToInteger(joinPath(PROC_CGROUP_CPUACCT_DIR, controlGroup, CPUACCT_USAGE_FILE)); +} + +function readCPUFsPeriod(controlGroup: string) { + return fileContentsToInteger(joinPath(PROC_CGROUP_CPU_DIR, controlGroup, CPU_FS_PERIOD_US_FILE)); +} + +function 
readCPUFsQuota(controlGroup: string) { + return fileContentsToInteger(joinPath(PROC_CGROUP_CPU_DIR, controlGroup, CPU_FS_QUOTA_US_FILE)); +} + +async function readCPUStat(controlGroup: string) { + const stat = { + number_of_elapsed_periods: -1, + number_of_times_throttled: -1, + time_throttled_nanos: -1, + }; + + try { + const data = await fs.promises.readFile( + joinPath(PROC_CGROUP_CPU_DIR, controlGroup, CPU_STATS_FILE) + ); + return data + .toString() + .split(/\n/) + .reduce((acc, line) => { + const fields = line.split(/\s+/); + + switch (fields[0]) { + case 'nr_periods': + acc.number_of_elapsed_periods = parseInt(fields[1], 10); + break; + + case 'nr_throttled': + acc.number_of_times_throttled = parseInt(fields[1], 10); + break; + + case 'throttled_time': + acc.time_throttled_nanos = parseInt(fields[1], 10); + break; + } + + return acc; + }, stat); + } catch (err) { + if (err.code === 'ENOENT') { + return stat; + } + + throw err; + } +} diff --git a/src/core/server/metrics/collectors/collector.mock.ts b/src/core/server/metrics/collectors/collector.mock.ts new file mode 100644 index 0000000000000..2a942e1fafe63 --- /dev/null +++ b/src/core/server/metrics/collectors/collector.mock.ts @@ -0,0 +1,33 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { MetricsCollector } from './types'; + +const createCollector = (collectReturnValue: any = {}): jest.Mocked> => { + const collector: jest.Mocked> = { + collect: jest.fn().mockResolvedValue(collectReturnValue), + reset: jest.fn(), + }; + + return collector; +}; + +export const metricsCollectorMock = { + create: createCollector, +}; diff --git a/src/core/server/metrics/collectors/index.ts b/src/core/server/metrics/collectors/index.ts index f58ab02e63881..4540cb79be74b 100644 --- a/src/core/server/metrics/collectors/index.ts +++ b/src/core/server/metrics/collectors/index.ts @@ -18,6 +18,6 @@ */ export { OpsProcessMetrics, OpsOsMetrics, OpsServerMetrics, MetricsCollector } from './types'; -export { OsMetricsCollector } from './os'; +export { OsMetricsCollector, OpsMetricsCollectorOptions } from './os'; export { ProcessMetricsCollector } from './process'; export { ServerMetricsCollector } from './server'; diff --git a/src/core/server/metrics/collectors/os.test.mocks.ts b/src/core/server/metrics/collectors/os.test.mocks.ts new file mode 100644 index 0000000000000..ee02b8c802151 --- /dev/null +++ b/src/core/server/metrics/collectors/os.test.mocks.ts @@ -0,0 +1,25 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { metricsCollectorMock } from './collector.mock'; + +export const cgroupCollectorMock = metricsCollectorMock.create(); +jest.doMock('./cgroup', () => ({ + OsCgroupMetricsCollector: jest.fn(() => cgroupCollectorMock), +})); diff --git a/src/core/server/metrics/collectors/os.test.ts b/src/core/server/metrics/collectors/os.test.ts index 7d5a6da90b7d6..5e52cecb76be3 100644 --- a/src/core/server/metrics/collectors/os.test.ts +++ b/src/core/server/metrics/collectors/os.test.ts @@ -20,6 +20,7 @@ jest.mock('getos', () => (cb: Function) => cb(null, { dist: 'distrib', release: 'release' })); import os from 'os'; +import { cgroupCollectorMock } from './os.test.mocks'; import { OsMetricsCollector } from './os'; describe('OsMetricsCollector', () => { @@ -27,6 +28,8 @@ describe('OsMetricsCollector', () => { beforeEach(() => { collector = new OsMetricsCollector(); + cgroupCollectorMock.collect.mockReset(); + cgroupCollectorMock.reset.mockReset(); }); afterEach(() => { @@ -96,4 +99,9 @@ describe('OsMetricsCollector', () => { '15m': fifteenMinLoad, }); }); + + it('calls the cgroup sub-collector', async () => { + await collector.collect(); + expect(cgroupCollectorMock.collect).toHaveBeenCalled(); + }); }); diff --git a/src/core/server/metrics/collectors/os.ts b/src/core/server/metrics/collectors/os.ts index 59bef9d8ddd2b..eae49278405a9 100644 --- a/src/core/server/metrics/collectors/os.ts +++ b/src/core/server/metrics/collectors/os.ts @@ -21,10 +21,22 @@ import os from 'os'; import getosAsync, { LinuxOs } from 'getos'; import { promisify } from 'util'; import { OpsOsMetrics, MetricsCollector } from './types'; +import { OsCgroupMetricsCollector } from './cgroup'; const getos = promisify(getosAsync); +export interface OpsMetricsCollectorOptions { + cpuPath?: string; + cpuAcctPath?: string; +} + export class OsMetricsCollector implements MetricsCollector { + private readonly cgroupCollector: OsCgroupMetricsCollector; + + constructor(options: OpsMetricsCollectorOptions = {}) { + this.cgroupCollector = new OsCgroupMetricsCollector(options); + } + public async collect(): Promise { const platform = os.platform(); const load = os.loadavg(); @@ -43,20 +55,30 @@ export class OsMetricsCollector implements MetricsCollector { used_in_bytes: os.totalmem() - os.freemem(), }, uptime_in_millis: os.uptime() * 1000, + ...(await this.getDistroStats(platform)), + ...(await this.cgroupCollector.collect()), }; + return metrics; + } + + public reset() {} + + private async getDistroStats( + platform: string + ): Promise> { if (platform === 'linux') { try { const distro = (await getos()) as LinuxOs; - metrics.distro = distro.dist; - metrics.distroRelease = `${distro.dist}-${distro.release}`; + return { + distro: distro.dist, + distroRelease: `${distro.dist}-${distro.release}`, + }; } catch (e) { // ignore errors } } - return metrics; + return {}; } - - public reset() {} } diff --git a/src/core/server/metrics/collectors/types.ts 
b/src/core/server/metrics/collectors/types.ts index 73e8975a6b362..77ea13a1f0787 100644 --- a/src/core/server/metrics/collectors/types.ts +++ b/src/core/server/metrics/collectors/types.ts @@ -85,6 +85,33 @@ export interface OpsOsMetrics { }; /** the OS uptime */ uptime_in_millis: number; + + /** cpu accounting metrics, undefined when not running in a cgroup */ + cpuacct?: { + /** name of this process's cgroup */ + control_group: string; + /** cpu time used by this process's cgroup */ + usage_nanos: number; + }; + + /** cpu cgroup metrics, undefined when not running in a cgroup */ + cpu?: { + /** name of this process's cgroup */ + control_group: string; + /** the length of the cfs period */ + cfs_period_micros: number; + /** total available run-time within a cfs period */ + cfs_quota_micros: number; + /** current stats on the cfs periods */ + stat: { + /** number of cfs periods that elapsed */ + number_of_elapsed_periods: number; + /** number of times the cgroup has been throttled */ + number_of_times_throttled: number; + /** total amount of time the cgroup has been throttled for */ + time_throttled_nanos: number; + }; + }; } /** diff --git a/src/core/server/metrics/metrics_service.mock.ts b/src/core/server/metrics/metrics_service.mock.ts index 769f6ee2a549a..2af653004a479 100644 --- a/src/core/server/metrics/metrics_service.mock.ts +++ b/src/core/server/metrics/metrics_service.mock.ts @@ -21,20 +21,18 @@ import { MetricsService } from './metrics_service'; import { InternalMetricsServiceSetup, InternalMetricsServiceStart, + MetricsServiceSetup, MetricsServiceStart, } from './types'; const createInternalSetupContractMock = () => { - const setupContract: jest.Mocked = {}; - return setupContract; -}; - -const createStartContractMock = () => { - const startContract: jest.Mocked = { + const setupContract: jest.Mocked = { + collectionInterval: 30000, getOpsMetrics$: jest.fn(), }; - startContract.getOpsMetrics$.mockReturnValue( + setupContract.getOpsMetrics$.mockReturnValue( new BehaviorSubject({ + collected_at: new Date('2020-01-01 01:00:00'), process: { memory: { heap: { total_in_bytes: 1, used_in_bytes: 1, size_limit: 1 }, @@ -56,11 +54,21 @@ const createStartContractMock = () => { concurrent_connections: 1, }) ); + return setupContract; +}; + +const createSetupContractMock = () => { + const startContract: jest.Mocked = createInternalSetupContractMock(); return startContract; }; const createInternalStartContractMock = () => { - const startContract: jest.Mocked = createStartContractMock(); + const startContract: jest.Mocked = createInternalSetupContractMock(); + return startContract; +}; + +const createStartContractMock = () => { + const startContract: jest.Mocked = createInternalSetupContractMock(); return startContract; }; @@ -77,7 +85,7 @@ const createMock = () => { export const metricsServiceMock = { create: createMock, - createSetupContract: createStartContractMock, + createSetupContract: createSetupContractMock, createStartContract: createStartContractMock, createInternalSetupContract: createInternalSetupContractMock, createInternalStartContract: createInternalStartContractMock, diff --git a/src/core/server/metrics/metrics_service.ts b/src/core/server/metrics/metrics_service.ts index f28fb21aaac0d..d4696b3aa9aaf 100644 --- a/src/core/server/metrics/metrics_service.ts +++ b/src/core/server/metrics/metrics_service.ts @@ -17,7 +17,7 @@ * under the License. 
*/ -import { Subject } from 'rxjs'; +import { ReplaySubject } from 'rxjs'; import { first } from 'rxjs/operators'; import { CoreService } from '../../types'; import { CoreContext } from '../core_context'; @@ -37,26 +37,21 @@ export class MetricsService private readonly logger: Logger; private metricsCollector?: OpsMetricsCollector; private collectInterval?: NodeJS.Timeout; - private metrics$ = new Subject(); + private metrics$ = new ReplaySubject(); + private service?: InternalMetricsServiceSetup; constructor(private readonly coreContext: CoreContext) { this.logger = coreContext.logger.get('metrics'); } public async setup({ http }: MetricsServiceSetupDeps): Promise { - this.metricsCollector = new OpsMetricsCollector(http.server); - return {}; - } - - public async start(): Promise { - if (!this.metricsCollector) { - throw new Error('#setup() needs to be run first'); - } const config = await this.coreContext.configService .atPath(opsConfig.path) .pipe(first()) .toPromise(); + this.metricsCollector = new OpsMetricsCollector(http.server, config.cGroupOverrides); + await this.refreshMetrics(); this.collectInterval = setInterval(() => { @@ -65,9 +60,20 @@ export class MetricsService const metricsObservable = this.metrics$.asObservable(); - return { + this.service = { + collectionInterval: config.interval.asMilliseconds(), getOpsMetrics$: () => metricsObservable, }; + + return this.service; + } + + public async start(): Promise { + if (!this.service) { + throw new Error('#setup() needs to be run first'); + } + + return this.service; } private async refreshMetrics() { diff --git a/src/core/server/metrics/ops_config.ts b/src/core/server/metrics/ops_config.ts index bd6ae5cc5474d..5f3f67e931c38 100644 --- a/src/core/server/metrics/ops_config.ts +++ b/src/core/server/metrics/ops_config.ts @@ -23,6 +23,10 @@ export const opsConfig = { path: 'ops', schema: schema.object({ interval: schema.duration({ defaultValue: '5s' }), + cGroupOverrides: schema.object({ + cpuPath: schema.maybe(schema.string()), + cpuAcctPath: schema.maybe(schema.string()), + }), }), }; diff --git a/src/core/server/metrics/ops_metrics_collector.test.ts b/src/core/server/metrics/ops_metrics_collector.test.ts index 9e76895b14578..7aa3f7cd3baf0 100644 --- a/src/core/server/metrics/ops_metrics_collector.test.ts +++ b/src/core/server/metrics/ops_metrics_collector.test.ts @@ -30,7 +30,7 @@ describe('OpsMetricsCollector', () => { beforeEach(() => { const hapiServer = httpServiceMock.createInternalSetupContract().server; - collector = new OpsMetricsCollector(hapiServer); + collector = new OpsMetricsCollector(hapiServer, {}); mockOsCollector.collect.mockResolvedValue('osMetrics'); }); @@ -51,6 +51,7 @@ describe('OpsMetricsCollector', () => { expect(mockServerCollector.collect).toHaveBeenCalledTimes(1); expect(metrics).toEqual({ + collected_at: expect.any(Date), process: 'processMetrics', os: 'osMetrics', requests: 'serverRequestsMetrics', diff --git a/src/core/server/metrics/ops_metrics_collector.ts b/src/core/server/metrics/ops_metrics_collector.ts index 525515dba1457..af74caa6cb386 100644 --- a/src/core/server/metrics/ops_metrics_collector.ts +++ b/src/core/server/metrics/ops_metrics_collector.ts @@ -21,6 +21,7 @@ import { Server as HapiServer } from 'hapi'; import { ProcessMetricsCollector, OsMetricsCollector, + OpsMetricsCollectorOptions, ServerMetricsCollector, MetricsCollector, } from './collectors'; @@ -31,9 +32,9 @@ export class OpsMetricsCollector implements MetricsCollector { private readonly osCollector: OsMetricsCollector; private 
readonly serverCollector: ServerMetricsCollector; - constructor(server: HapiServer) { + constructor(server: HapiServer, opsOptions: OpsMetricsCollectorOptions) { this.processCollector = new ProcessMetricsCollector(); - this.osCollector = new OsMetricsCollector(); + this.osCollector = new OsMetricsCollector(opsOptions); this.serverCollector = new ServerMetricsCollector(server); } @@ -44,6 +45,7 @@ export class OpsMetricsCollector implements MetricsCollector { this.serverCollector.collect(), ]); return { + collected_at: new Date(), process, os, ...server, diff --git a/src/core/server/metrics/types.ts b/src/core/server/metrics/types.ts index cbf0acacd6bab..c177b3ed25115 100644 --- a/src/core/server/metrics/types.ts +++ b/src/core/server/metrics/types.ts @@ -20,14 +20,15 @@ import { Observable } from 'rxjs'; import { OpsProcessMetrics, OpsOsMetrics, OpsServerMetrics } from './collectors'; -// eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface MetricsServiceSetup {} /** * APIs to retrieves metrics gathered and exposed by the core platform. * * @public */ -export interface MetricsServiceStart { +export interface MetricsServiceSetup { + /** Interval metrics are collected in milliseconds */ + readonly collectionInterval: number; + /** * Retrieve an observable emitting the {@link OpsMetrics} gathered. * The observable will emit an initial value during core's `start` phase, and a new value every fixed interval of time, @@ -42,6 +43,12 @@ export interface MetricsServiceStart { */ getOpsMetrics$: () => Observable; } +/** + * {@inheritdoc MetricsServiceSetup} + * + * @public + */ +export type MetricsServiceStart = MetricsServiceSetup; export type InternalMetricsServiceSetup = MetricsServiceSetup; export type InternalMetricsServiceStart = MetricsServiceStart; @@ -53,6 +60,8 @@ export type InternalMetricsServiceStart = MetricsServiceStart; * @public */ export interface OpsMetrics { + /** Time metrics were recorded at. 
*/ + collected_at: Date; /** Process related metrics */ process: OpsProcessMetrics; /** OS related metrics */ diff --git a/src/core/server/plugins/plugin_context.ts b/src/core/server/plugins/plugin_context.ts index fa2659ca130a0..af0b0e19b3227 100644 --- a/src/core/server/plugins/plugin_context.ts +++ b/src/core/server/plugins/plugin_context.ts @@ -185,6 +185,9 @@ export function createPluginSetupContext( status: { core$: deps.status.core$, overall$: deps.status.overall$, + set: deps.status.plugins.set.bind(null, plugin.name), + dependencies$: deps.status.plugins.getDependenciesStatus$(plugin.name), + derivedStatus$: deps.status.plugins.getDerivedStatus$(plugin.name), }, uiSettings: { register: deps.uiSettings.register, @@ -233,6 +236,7 @@ export function createPluginStartContext( getTypeRegistry: deps.savedObjects.getTypeRegistry, }, metrics: { + collectionInterval: deps.metrics.collectionInterval, getOpsMetrics$: deps.metrics.getOpsMetrics$, }, uiSettings: { diff --git a/src/core/server/plugins/plugins_system.test.ts b/src/core/server/plugins/plugins_system.test.ts index 7af77491df1ab..71ac31db13f92 100644 --- a/src/core/server/plugins/plugins_system.test.ts +++ b/src/core/server/plugins/plugins_system.test.ts @@ -100,15 +100,27 @@ test('getPluginDependencies returns dependency tree of symbols', () => { pluginsSystem.addPlugin(createPlugin('no-dep')); expect(pluginsSystem.getPluginDependencies()).toMatchInlineSnapshot(` - Map { - Symbol(plugin-a) => Array [ - Symbol(no-dep), - ], - Symbol(plugin-b) => Array [ - Symbol(plugin-a), - Symbol(no-dep), - ], - Symbol(no-dep) => Array [], + Object { + "asNames": Map { + "plugin-a" => Array [ + "no-dep", + ], + "plugin-b" => Array [ + "plugin-a", + "no-dep", + ], + "no-dep" => Array [], + }, + "asOpaqueIds": Map { + Symbol(plugin-a) => Array [ + Symbol(no-dep), + ], + Symbol(plugin-b) => Array [ + Symbol(plugin-a), + Symbol(no-dep), + ], + Symbol(no-dep) => Array [], + }, } `); }); diff --git a/src/core/server/plugins/plugins_system.ts b/src/core/server/plugins/plugins_system.ts index f5c1b35d678a3..b2acd9a6fd04b 100644 --- a/src/core/server/plugins/plugins_system.ts +++ b/src/core/server/plugins/plugins_system.ts @@ -20,10 +20,11 @@ import { CoreContext } from '../core_context'; import { Logger } from '../logging'; import { PluginWrapper } from './plugin'; -import { DiscoveredPlugin, PluginName, PluginOpaqueId } from './types'; +import { DiscoveredPlugin, PluginName } from './types'; import { createPluginSetupContext, createPluginStartContext } from './plugin_context'; import { PluginsServiceSetupDeps, PluginsServiceStartDeps } from './plugins_service'; import { withTimeout } from '../../utils'; +import { PluginDependencies } from '.'; const Sec = 1000; /** @internal */ @@ -45,9 +46,19 @@ export class PluginsSystem { * @returns a ReadonlyMap of each plugin and an Array of its available dependencies * @internal */ - public getPluginDependencies(): ReadonlyMap { - // Return dependency map of opaque ids - return new Map( + public getPluginDependencies(): PluginDependencies { + const asNames = new Map( + [...this.plugins].map(([name, plugin]) => [ + plugin.name, + [ + ...new Set([ + ...plugin.requiredPlugins, + ...plugin.optionalPlugins.filter((optPlugin) => this.plugins.has(optPlugin)), + ]), + ].map((depId) => this.plugins.get(depId)!.name), + ]) + ); + const asOpaqueIds = new Map( [...this.plugins].map(([name, plugin]) => [ plugin.opaqueId, [ @@ -58,6 +69,8 @@ export class PluginsSystem { ].map((depId) => this.plugins.get(depId)!.opaqueId), ]) ); 
+ + return { asNames, asOpaqueIds }; } public async setupPlugins(deps: PluginsServiceSetupDeps) { diff --git a/src/core/server/plugins/types.ts b/src/core/server/plugins/types.ts index eb2a9ca3daf5f..517261b5bc9bb 100644 --- a/src/core/server/plugins/types.ts +++ b/src/core/server/plugins/types.ts @@ -93,6 +93,12 @@ export type PluginName = string; /** @public */ export type PluginOpaqueId = symbol; +/** @internal */ +export interface PluginDependencies { + asNames: ReadonlyMap; + asOpaqueIds: ReadonlyMap; +} + /** * Describes the set of required and optional properties plugin can define in its * mandatory JSON manifest file. diff --git a/src/core/server/rendering/__snapshots__/rendering_service.test.ts.snap b/src/core/server/rendering/__snapshots__/rendering_service.test.ts.snap index 5ff5d69f96f70..ab828a1780425 100644 --- a/src/core/server/rendering/__snapshots__/rendering_service.test.ts.snap +++ b/src/core/server/rendering/__snapshots__/rendering_service.test.ts.snap @@ -46,7 +46,6 @@ Object { }, "version": Any, }, - "legacyMode": false, "serverBasePath": "/mock-server-basepath", "uiPlugins": Array [], "vars": Object {}, @@ -100,7 +99,6 @@ Object { }, "version": Any, }, - "legacyMode": false, "serverBasePath": "/mock-server-basepath", "uiPlugins": Array [], "vars": Object {}, @@ -158,7 +156,6 @@ Object { }, "version": Any, }, - "legacyMode": false, "serverBasePath": "/mock-server-basepath", "uiPlugins": Array [], "vars": Object {}, @@ -212,7 +209,6 @@ Object { }, "version": Any, }, - "legacyMode": false, "serverBasePath": "/mock-server-basepath", "uiPlugins": Array [], "vars": Object {}, @@ -266,7 +262,6 @@ Object { }, "version": Any, }, - "legacyMode": false, "serverBasePath": "/mock-server-basepath", "uiPlugins": Array [], "vars": Object {}, diff --git a/src/core/server/rendering/rendering_service.tsx b/src/core/server/rendering/rendering_service.tsx index e7ee0b16fce08..7761c89044f6f 100644 --- a/src/core/server/rendering/rendering_service.tsx +++ b/src/core/server/rendering/rendering_service.tsx @@ -82,7 +82,6 @@ export class RenderingService implements CoreService { diff --git a/src/core/server/saved_objects/index.ts b/src/core/server/saved_objects/index.ts index a294b28753f7b..f2bae29c4743b 100644 --- a/src/core/server/saved_objects/index.ts +++ b/src/core/server/saved_objects/index.ts @@ -19,8 +19,6 @@ export * from './service'; -export { SavedObjectsSchema } from './schema'; - export * from './import'; export { diff --git a/src/core/server/saved_objects/migrations/core/document_migrator.test.ts b/src/core/server/saved_objects/migrations/core/document_migrator.test.ts index 4fc94d1992869..4cc4f696d307c 100644 --- a/src/core/server/saved_objects/migrations/core/document_migrator.test.ts +++ b/src/core/server/saved_objects/migrations/core/document_migrator.test.ts @@ -48,7 +48,6 @@ describe('DocumentMigrator', () => { return { kibanaVersion: '25.2.3', typeRegistry: createRegistry(), - validateDoc: _.noop, log: mockLogger, }; } @@ -60,7 +59,6 @@ describe('DocumentMigrator', () => { name: 'foo', migrations: _.noop as any, }), - validateDoc: _.noop, log: mockLogger, }; expect(() => new DocumentMigrator(invalidDefinition)).toThrow( @@ -77,7 +75,6 @@ describe('DocumentMigrator', () => { bar: (doc) => doc, }, }), - validateDoc: _.noop, log: mockLogger, }; expect(() => new DocumentMigrator(invalidDefinition)).toThrow( @@ -94,7 +91,6 @@ describe('DocumentMigrator', () => { '1.2.3': 23 as any, }, }), - validateDoc: _.noop, log: mockLogger, }; expect(() => new 
DocumentMigrator(invalidDefinition)).toThrow( @@ -633,27 +629,6 @@ describe('DocumentMigrator', () => { bbb: '3.2.3', }); }); - - test('fails if the validate doc throws', () => { - const migrator = new DocumentMigrator({ - ...testOpts(), - typeRegistry: createRegistry({ - name: 'aaa', - migrations: { - '2.3.4': (d) => set(d, 'attributes.counter', 42), - }, - }), - validateDoc: (d) => { - if ((d.attributes as any).counter === 42) { - throw new Error('Meaningful!'); - } - }, - }); - - const doc = { id: '1', type: 'foo', attributes: {}, migrationVersion: {}, aaa: {} }; - - expect(() => migrator.migrate(doc)).toThrow(/Meaningful/); - }); }); function renameAttr(path: string, newPath: string) { diff --git a/src/core/server/saved_objects/migrations/core/document_migrator.ts b/src/core/server/saved_objects/migrations/core/document_migrator.ts index c50f755fda994..345704fbfd783 100644 --- a/src/core/server/saved_objects/migrations/core/document_migrator.ts +++ b/src/core/server/saved_objects/migrations/core/document_migrator.ts @@ -73,12 +73,9 @@ import { SavedObjectMigrationFn } from '../types'; export type TransformFn = (doc: SavedObjectUnsanitizedDoc) => SavedObjectUnsanitizedDoc; -type ValidateDoc = (doc: SavedObjectUnsanitizedDoc) => void; - interface DocumentMigratorOptions { kibanaVersion: string; typeRegistry: ISavedObjectTypeRegistry; - validateDoc: ValidateDoc; log: Logger; } @@ -113,19 +110,16 @@ export class DocumentMigrator implements VersionedTransformer { * @param {DocumentMigratorOptions} opts * @prop {string} kibanaVersion - The current version of Kibana * @prop {SavedObjectTypeRegistry} typeRegistry - The type registry to get type migrations from - * @prop {ValidateDoc} validateDoc - A function which, given a document throws an error if it is - * not up to date. This is used to ensure we don't let unmigrated documents slip through. * @prop {Logger} log - The migration logger * @memberof DocumentMigrator */ - constructor({ typeRegistry, kibanaVersion, log, validateDoc }: DocumentMigratorOptions) { + constructor({ typeRegistry, kibanaVersion, log }: DocumentMigratorOptions) { validateMigrationDefinition(typeRegistry); this.migrations = buildActiveMigrations(typeRegistry, log); this.transformDoc = buildDocumentTransform({ kibanaVersion, migrations: this.migrations, - validateDoc, }); } @@ -231,21 +225,16 @@ function buildActiveMigrations( * Creates a function which migrates and validates any document that is passed to it. */ function buildDocumentTransform({ - kibanaVersion, migrations, - validateDoc, }: { kibanaVersion: string; migrations: ActiveMigrations; - validateDoc: ValidateDoc; }): TransformFn { return function transformAndValidate(doc: SavedObjectUnsanitizedDoc) { const result = doc.migrationVersion ? applyMigrations(doc, migrations) : markAsUpToDate(doc, migrations); - validateDoc(result); - // In order to keep tests a bit more stable, we won't // tack on an empy migrationVersion to docs that have // no migrations defined. 
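For context, `buildDocumentTransform` above now simply runs a document through its type's version-keyed migrations when its recorded `migrationVersion` is behind, or marks it up to date, with no post-migration validation hook. The following standalone sketch illustrates that ordering behaviour only; the `Doc` shape and the `compareVersions`/`applyMigrations` helpers are simplified stand-ins invented for this example, not the `DocumentMigrator` code from this patch.

```ts
type Doc = {
  id: string;
  type: string;
  attributes: Record<string, unknown>;
  migrationVersion?: Record<string, string>;
};
type MigrationFn = (doc: Doc) => Doc;
type MigrationMap = Record<string, MigrationFn>; // keyed by version, e.g. '7.10.0'

// Compare dotted version strings numerically so that '7.10.0' sorts after '7.9.0'.
const compareVersions = (a: string, b: string): number => {
  const pa = a.split('.').map(Number);
  const pb = b.split('.').map(Number);
  for (let i = 0; i < Math.max(pa.length, pb.length); i++) {
    const diff = (pa[i] ?? 0) - (pb[i] ?? 0);
    if (diff !== 0) return diff;
  }
  return 0;
};

// Apply every migration registered for the doc's type that is newer than the version
// already recorded in migrationVersion, in ascending order, recording the last version applied.
function applyMigrations(doc: Doc, migrations: MigrationMap): Doc {
  const applied = doc.migrationVersion?.[doc.type] ?? '0.0.0';
  return Object.keys(migrations)
    .filter((version) => compareVersions(version, applied) > 0)
    .sort(compareVersions)
    .reduce((acc, version) => {
      const next = migrations[version](acc);
      return {
        ...next,
        migrationVersion: { ...next.migrationVersion, [next.type]: version },
      };
    }, doc);
}

// A doc last migrated at 1.0.0 picks up only the 1.1.0 and 2.0.0 transforms.
const migrated = applyMigrations(
  {
    id: '1',
    type: 'dashboard',
    attributes: { title: 'Old title' },
    migrationVersion: { dashboard: '1.0.0' },
  },
  {
    '1.1.0': (d) => ({ ...d, attributes: { ...d.attributes, title: `${String(d.attributes.title)}!` } }),
    '2.0.0': (d) => ({ ...d, attributes: { ...d.attributes, migrated: true } }),
  }
);
// migrated.migrationVersion is now { dashboard: '2.0.0' }
```
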
diff --git a/src/core/server/saved_objects/migrations/core/index_migrator.test.ts b/src/core/server/saved_objects/migrations/core/index_migrator.test.ts index df89137a1d798..13f771c16bc67 100644 --- a/src/core/server/saved_objects/migrations/core/index_migrator.test.ts +++ b/src/core/server/saved_objects/migrations/core/index_migrator.test.ts @@ -369,6 +369,30 @@ describe('IndexMigrator', () => { ], }); }); + + test('rejects when the migration function throws an error', async () => { + const { client } = testOpts; + const migrateDoc = jest.fn((doc: SavedObjectUnsanitizedDoc) => { + throw new Error('error migrating document'); + }); + + testOpts.documentMigrator = { + migrationVersion: { foo: '1.2.3' }, + migrate: migrateDoc, + }; + + withIndex(client, { + numOutOfDate: 1, + docs: [ + [{ _id: 'foo:1', _source: { type: 'foo', foo: { name: 'Bar' } } }], + [{ _id: 'foo:2', _source: { type: 'foo', foo: { name: 'Baz' } } }], + ], + }); + + await expect(new IndexMigrator(testOpts).migrate()).rejects.toThrowErrorMatchingInlineSnapshot( + `"error migrating document"` + ); + }); }); function withIndex( diff --git a/src/core/server/saved_objects/migrations/core/migrate_raw_docs.test.ts b/src/core/server/saved_objects/migrations/core/migrate_raw_docs.test.ts index 4c9d2e870a7bb..83dc042d2b96b 100644 --- a/src/core/server/saved_objects/migrations/core/migrate_raw_docs.test.ts +++ b/src/core/server/saved_objects/migrations/core/migrate_raw_docs.test.ts @@ -90,4 +90,18 @@ describe('migrateRawDocs', () => { expect(logger.error).toBeCalledTimes(1); }); + + test('rejects when the transform function throws an error', async () => { + const transform = jest.fn((doc: any) => { + throw new Error('error during transform'); + }); + await expect( + migrateRawDocs( + new SavedObjectsSerializer(new SavedObjectTypeRegistry()), + transform, + [{ _id: 'a:b', _source: { type: 'a', a: { name: 'AAA' } } }], + createSavedObjectsMigrationLoggerMock() + ) + ).rejects.toThrowErrorMatchingInlineSnapshot(`"error during transform"`); + }); }); diff --git a/src/core/server/saved_objects/migrations/core/migrate_raw_docs.ts b/src/core/server/saved_objects/migrations/core/migrate_raw_docs.ts index 2bdf59d25dc74..5a5048d8ad88f 100644 --- a/src/core/server/saved_objects/migrations/core/migrate_raw_docs.ts +++ b/src/core/server/saved_objects/migrations/core/migrate_raw_docs.ts @@ -78,10 +78,14 @@ function transformNonBlocking( ): (doc: SavedObjectUnsanitizedDoc) => Promise { // promises aren't enough to unblock the event loop return (doc: SavedObjectUnsanitizedDoc) => - new Promise((resolve) => { + new Promise((resolve, reject) => { // set immediate is though setImmediate(() => { - resolve(transform(doc)); + try { + resolve(transform(doc)); + } catch (e) { + reject(e); + } }); }); } diff --git a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts index cc443093e30a3..7eb2cfefe4620 100644 --- a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts +++ b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts @@ -134,7 +134,6 @@ const mockOptions = () => { const options: MockedOptions = { logger: loggingSystemMock.create().get(), kibanaVersion: '8.2.3', - savedObjectValidations: {}, typeRegistry: createRegistry([ { name: 'testtype', diff --git a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts index 85b9099308807..18a385c6994b8 
100644 --- a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts +++ b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts @@ -28,7 +28,6 @@ import { BehaviorSubject } from 'rxjs'; import { Logger } from '../../../logging'; import { IndexMapping, SavedObjectsTypeMappingDefinitions } from '../../mappings'; import { SavedObjectUnsanitizedDoc, SavedObjectsSerializer } from '../../serialization'; -import { docValidator, PropertyValidators } from '../../validation'; import { buildActiveMappings, IndexMigrator, MigrationResult, MigrationStatus } from '../core'; import { DocumentMigrator, VersionedTransformer } from '../core/document_migrator'; import { MigrationEsClient } from '../core/'; @@ -44,7 +43,6 @@ export interface KibanaMigratorOptions { kibanaConfig: KibanaConfigType; kibanaVersion: string; logger: Logger; - savedObjectValidations: PropertyValidators; } export type IKibanaMigrator = Pick; @@ -80,7 +78,6 @@ export class KibanaMigrator { typeRegistry, kibanaConfig, savedObjectsConfig, - savedObjectValidations, kibanaVersion, logger, }: KibanaMigratorOptions) { @@ -94,7 +91,6 @@ export class KibanaMigrator { this.documentMigrator = new DocumentMigrator({ kibanaVersion, typeRegistry, - validateDoc: docValidator(savedObjectValidations || {}), log: this.log, }); // Building the active mappings (and associated md5sums) is an expensive @@ -124,9 +120,17 @@ export class KibanaMigrator { Array<{ status: string }> > { if (this.migrationResult === undefined || rerun) { - this.status$.next({ status: 'running' }); + // Reruns are only used by CI / EsArchiver. Publishing status updates on reruns results in slowing down CI + // unnecessarily, so we skip it in this case. + if (!rerun) { + this.status$.next({ status: 'running' }); + } + this.migrationResult = this.runMigrationsInternal().then((result) => { - this.status$.next({ status: 'completed', result }); + // Similar to above, don't publish status updates when rerunning in CI. 
+ if (!rerun) { + this.status$.next({ status: 'completed', result }); + } return result; }); } diff --git a/src/core/server/saved_objects/routes/export.ts b/src/core/server/saved_objects/routes/export.ts index 9445c144ecda4..35a65d8d9651f 100644 --- a/src/core/server/saved_objects/routes/export.ts +++ b/src/core/server/saved_objects/routes/export.ts @@ -19,11 +19,7 @@ import { schema } from '@kbn/config-schema'; import stringify from 'json-stable-stringify'; -import { - createPromiseFromStreams, - createMapStream, - createConcatStream, -} from '../../../../legacy/utils/streams'; +import { createPromiseFromStreams, createMapStream, createConcatStream } from '../../utils/streams'; import { IRouter } from '../../http'; import { SavedObjectConfig } from '../saved_objects_config'; import { exportSavedObjectsToStream } from '../export'; diff --git a/src/core/server/saved_objects/routes/integration_tests/export.test.ts b/src/core/server/saved_objects/routes/integration_tests/export.test.ts index d47f7c6050d8f..a3891712fd22b 100644 --- a/src/core/server/saved_objects/routes/integration_tests/export.test.ts +++ b/src/core/server/saved_objects/routes/integration_tests/export.test.ts @@ -22,7 +22,7 @@ jest.mock('../../export', () => ({ })); import * as exportMock from '../../export'; -import { createListStream } from '../../../../../legacy/utils/streams'; +import { createListStream } from '../../../utils/streams'; import supertest from 'supertest'; import { UnwrapPromise } from '@kbn/utility-types'; import { SavedObjectConfig } from '../../saved_objects_config'; diff --git a/src/core/server/saved_objects/routes/integration_tests/migrate.test.ts b/src/core/server/saved_objects/routes/integration_tests/migrate.test.ts index 7a0e39b71afb8..e003d564c1ea2 100644 --- a/src/core/server/saved_objects/routes/integration_tests/migrate.test.ts +++ b/src/core/server/saved_objects/routes/integration_tests/migrate.test.ts @@ -18,7 +18,7 @@ */ import { migratorInstanceMock } from './migrate.test.mocks'; -import * as kbnTestServer from '../../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../../test_helpers/kbn_server'; describe('SavedObjects /_migrate endpoint', () => { let root: ReturnType; diff --git a/src/core/server/saved_objects/routes/utils.test.ts b/src/core/server/saved_objects/routes/utils.test.ts index 24719724785af..fd3bdad8606ed 100644 --- a/src/core/server/saved_objects/routes/utils.test.ts +++ b/src/core/server/saved_objects/routes/utils.test.ts @@ -19,7 +19,7 @@ import { createSavedObjectsStreamFromNdJson, validateTypes, validateObjects } from './utils'; import { Readable } from 'stream'; -import { createPromiseFromStreams, createConcatStream } from '../../../../legacy/utils/streams'; +import { createPromiseFromStreams, createConcatStream } from '../../utils/streams'; async function readStreamToCompletion(stream: Readable) { return createPromiseFromStreams([stream, createConcatStream([])]); diff --git a/src/core/server/saved_objects/routes/utils.ts b/src/core/server/saved_objects/routes/utils.ts index 3963833a9c718..f16a6e471257d 100644 --- a/src/core/server/saved_objects/routes/utils.ts +++ b/src/core/server/saved_objects/routes/utils.ts @@ -19,11 +19,7 @@ import { Readable } from 'stream'; import { SavedObject, SavedObjectsExportResultDetails } from 'src/core/server'; -import { - createSplitStream, - createMapStream, - createFilterStream, -} from '../../../../legacy/utils/streams'; +import { createSplitStream, createMapStream, createFilterStream } from 
'../../utils/streams'; export function createSavedObjectsStreamFromNdJson(ndJsonStream: Readable) { return ndJsonStream diff --git a/src/core/server/saved_objects/saved_objects_service.mock.ts b/src/core/server/saved_objects/saved_objects_service.mock.ts index 6f5ecb1eb464b..e3d44c20dd190 100644 --- a/src/core/server/saved_objects/saved_objects_service.mock.ts +++ b/src/core/server/saved_objects/saved_objects_service.mock.ts @@ -26,8 +26,7 @@ import { SavedObjectsServiceSetup, SavedObjectsServiceStart, } from './saved_objects_service'; -import { mockKibanaMigrator } from './migrations/kibana/kibana_migrator.mock'; -import { savedObjectsClientProviderMock } from './service/lib/scoped_client_provider.mock'; + import { savedObjectsRepositoryMock } from './service/lib/repository.mock'; import { savedObjectsClientMock } from './service/saved_objects_client.mock'; import { typeRegistryMock } from './saved_objects_type_registry.mock'; @@ -54,11 +53,7 @@ const createStartContractMock = () => { }; const createInternalStartContractMock = () => { - const internalStartContract: jest.Mocked = { - ...createStartContractMock(), - clientProvider: savedObjectsClientProviderMock.create(), - migrator: mockKibanaMigrator.create(), - }; + const internalStartContract: jest.Mocked = createStartContractMock(); return internalStartContract; }; diff --git a/src/core/server/saved_objects/saved_objects_service.test.ts b/src/core/server/saved_objects/saved_objects_service.test.ts index 8df6a07318c45..d6b30889eba5f 100644 --- a/src/core/server/saved_objects/saved_objects_service.test.ts +++ b/src/core/server/saved_objects/saved_objects_service.test.ts @@ -33,7 +33,6 @@ import { Env } from '../config'; import { configServiceMock } from '../mocks'; import { elasticsearchServiceMock } from '../elasticsearch/elasticsearch_service.mock'; import { elasticsearchClientMock } from '../elasticsearch/client/mocks'; -import { legacyServiceMock } from '../legacy/legacy_service.mock'; import { httpServiceMock } from '../http/http_service.mock'; import { httpServerMock } from '../http/http_server.mocks'; import { SavedObjectsClientFactoryProvider } from './service/lib'; @@ -65,7 +64,6 @@ describe('SavedObjectsService', () => { return { http: httpServiceMock.createInternalSetupContract(), elasticsearch: elasticsearchMock, - legacyPlugins: legacyServiceMock.createDiscoverPlugins(), }; }; @@ -239,8 +237,7 @@ describe('SavedObjectsService', () => { await soService.setup(createSetupDeps()); expect(migratorInstanceMock.runMigrations).toHaveBeenCalledTimes(0); - const startContract = await soService.start(createStartDeps()); - expect(startContract.migrator).toBe(migratorInstanceMock); + await soService.start(createStartDeps()); expect(migratorInstanceMock.runMigrations).toHaveBeenCalledTimes(1); }); diff --git a/src/core/server/saved_objects/saved_objects_service.ts b/src/core/server/saved_objects/saved_objects_service.ts index f05e912b12ad8..5cc59d55a254e 100644 --- a/src/core/server/saved_objects/saved_objects_service.ts +++ b/src/core/server/saved_objects/saved_objects_service.ts @@ -23,12 +23,10 @@ import { CoreService } from '../../types'; import { SavedObjectsClient, SavedObjectsClientProvider, - ISavedObjectsClientProvider, SavedObjectsClientProviderOptions, } from './'; import { KibanaMigrator, IKibanaMigrator } from './migrations'; import { CoreContext } from '../core_context'; -import { LegacyServiceDiscoverPlugins } from '../legacy'; import { ElasticsearchClient, IClusterClient, @@ -49,9 +47,7 @@ import { 
SavedObjectsClientWrapperFactory, } from './service/lib/scoped_client_provider'; import { Logger } from '../logging'; -import { convertLegacyTypes } from './utils'; import { SavedObjectTypeRegistry, ISavedObjectTypeRegistry } from './saved_objects_type_registry'; -import { PropertyValidators } from './validation'; import { SavedObjectsSerializer } from './serialization'; import { registerRoutes } from './routes'; import { ServiceStatus } from '../status'; @@ -67,9 +63,6 @@ import { createMigrationEsClient } from './migrations/core/'; * the factory provided to `setClientFactory` and wrapped by all wrappers * registered through `addClientWrapper`. * - * All the setup APIs will throw if called after the service has started, and therefor cannot be used - * from legacy plugin code. Legacy plugins should use the legacy savedObject service until migrated. - * * @example * ```ts * import { SavedObjectsClient, CoreSetup } from 'src/core/server'; @@ -155,9 +148,6 @@ export interface SavedObjectsServiceSetup { * } * } * ``` - * - * @remarks The type definition is an aggregation of the legacy savedObjects `schema`, `mappings` and `migration` concepts. - * This API is the single entry point to register saved object types in the new platform. */ registerType: (type: SavedObjectsType) => void; @@ -230,16 +220,7 @@ export interface SavedObjectsServiceStart { getTypeRegistry: () => ISavedObjectTypeRegistry; } -export interface InternalSavedObjectsServiceStart extends SavedObjectsServiceStart { - /** - * @deprecated Exposed only for injecting into Legacy - */ - migrator: IKibanaMigrator; - /** - * @deprecated Exposed only for injecting into Legacy - */ - clientProvider: ISavedObjectsClientProvider; -} +export type InternalSavedObjectsServiceStart = SavedObjectsServiceStart; /** * Factory provided when invoking a {@link SavedObjectsClientFactoryProvider | client factory provider} @@ -271,7 +252,6 @@ export interface SavedObjectsRepositoryFactory { /** @internal */ export interface SavedObjectsSetupDeps { http: InternalHttpServiceSetup; - legacyPlugins: LegacyServiceDiscoverPlugins; elasticsearch: InternalElasticsearchServiceSetup; } @@ -296,9 +276,8 @@ export class SavedObjectsService private clientFactoryProvider?: SavedObjectsClientFactoryProvider; private clientFactoryWrappers: WrappedClientFactoryWrapper[] = []; - private migrator$ = new Subject(); + private migrator$ = new Subject(); private typeRegistry = new SavedObjectTypeRegistry(); - private validations: PropertyValidators = {}; private started = false; constructor(private readonly coreContext: CoreContext) { @@ -310,13 +289,6 @@ export class SavedObjectsService this.setupDeps = setupDeps; - const legacyTypes = convertLegacyTypes( - setupDeps.legacyPlugins.uiExports, - setupDeps.legacyPlugins.pluginExtendedConfig - ); - legacyTypes.forEach((type) => this.typeRegistry.registerType(type)); - this.validations = setupDeps.legacyPlugins.uiExports.savedObjectValidations || {}; - const savedObjectsConfig = await this.coreContext.configService .atPath('savedObjects') .pipe(first()) @@ -471,8 +443,6 @@ export class SavedObjectsService this.started = true; return { - migrator, - clientProvider, getScopedClient: clientProvider.getClient.bind(clientProvider), createScopedRepository: repositoryFactory.createScopedRepository, createInternalRepository: repositoryFactory.createInternalRepository, @@ -488,13 +458,12 @@ export class SavedObjectsService savedObjectsConfig: SavedObjectsMigrationConfigType, client: IClusterClient, migrationsRetryDelay?: number - ): 
KibanaMigrator { + ): IKibanaMigrator { return new KibanaMigrator({ typeRegistry: this.typeRegistry, logger: this.logger, kibanaVersion: this.coreContext.env.packageInfo.version, savedObjectsConfig, - savedObjectValidations: this.validations, kibanaConfig, client: createMigrationEsClient(client.asInternalUser, this.logger, migrationsRetryDelay), }); diff --git a/src/core/server/saved_objects/schema/index.ts b/src/core/server/saved_objects/schema/index.ts deleted file mode 100644 index d30bbb8d34cd3..0000000000000 --- a/src/core/server/saved_objects/schema/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { SavedObjectsSchema, SavedObjectsSchemaDefinition } from './schema'; diff --git a/src/core/server/saved_objects/schema/schema.test.ts b/src/core/server/saved_objects/schema/schema.test.ts deleted file mode 100644 index f2daa13e43fce..0000000000000 --- a/src/core/server/saved_objects/schema/schema.test.ts +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { SavedObjectsSchema, SavedObjectsSchemaDefinition } from './schema'; - -describe('#isNamespaceAgnostic', () => { - const expectResult = (expected: boolean, schemaDefinition?: SavedObjectsSchemaDefinition) => { - const schema = new SavedObjectsSchema(schemaDefinition); - const result = schema.isNamespaceAgnostic('foo'); - expect(result).toBe(expected); - }; - - it(`returns false when no schema is defined`, () => { - expectResult(false); - }); - - it(`returns false for unknown types`, () => { - expectResult(false, { bar: {} }); - }); - - it(`returns false for non-namespace-agnostic type`, () => { - expectResult(false, { foo: { isNamespaceAgnostic: false } }); - expectResult(false, { foo: { isNamespaceAgnostic: undefined } }); - }); - - it(`returns true for explicitly namespace-agnostic type`, () => { - expectResult(true, { foo: { isNamespaceAgnostic: true } }); - }); -}); - -describe('#isSingleNamespace', () => { - const expectResult = (expected: boolean, schemaDefinition?: SavedObjectsSchemaDefinition) => { - const schema = new SavedObjectsSchema(schemaDefinition); - const result = schema.isSingleNamespace('foo'); - expect(result).toBe(expected); - }; - - it(`returns true when no schema is defined`, () => { - expectResult(true); - }); - - it(`returns true for unknown types`, () => { - expectResult(true, { bar: {} }); - }); - - it(`returns false for explicitly namespace-agnostic type`, () => { - expectResult(false, { foo: { isNamespaceAgnostic: true } }); - }); - - it(`returns false for explicitly multi-namespace type`, () => { - expectResult(false, { foo: { multiNamespace: true } }); - }); - - it(`returns true for non-namespace-agnostic and non-multi-namespace type`, () => { - expectResult(true, { foo: { isNamespaceAgnostic: false, multiNamespace: false } }); - expectResult(true, { foo: { isNamespaceAgnostic: false, multiNamespace: undefined } }); - expectResult(true, { foo: { isNamespaceAgnostic: undefined, multiNamespace: false } }); - expectResult(true, { foo: { isNamespaceAgnostic: undefined, multiNamespace: undefined } }); - }); -}); - -describe('#isMultiNamespace', () => { - const expectResult = (expected: boolean, schemaDefinition?: SavedObjectsSchemaDefinition) => { - const schema = new SavedObjectsSchema(schemaDefinition); - const result = schema.isMultiNamespace('foo'); - expect(result).toBe(expected); - }; - - it(`returns false when no schema is defined`, () => { - expectResult(false); - }); - - it(`returns false for unknown types`, () => { - expectResult(false, { bar: {} }); - }); - - it(`returns false for explicitly namespace-agnostic type`, () => { - expectResult(false, { foo: { isNamespaceAgnostic: true } }); - }); - - it(`returns false for non-multi-namespace type`, () => { - expectResult(false, { foo: { multiNamespace: false } }); - expectResult(false, { foo: { multiNamespace: undefined } }); - }); - - it(`returns true for non-namespace-agnostic and explicitly multi-namespace type`, () => { - expectResult(true, { foo: { isNamespaceAgnostic: false, multiNamespace: true } }); - expectResult(true, { foo: { isNamespaceAgnostic: undefined, multiNamespace: true } }); - }); -}); diff --git a/src/core/server/saved_objects/schema/schema.ts b/src/core/server/saved_objects/schema/schema.ts deleted file mode 100644 index ba1905158e822..0000000000000 --- a/src/core/server/saved_objects/schema/schema.ts +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { LegacyConfig } from '../../legacy'; - -/** - * @deprecated - * @internal - **/ -interface SavedObjectsSchemaTypeDefinition { - isNamespaceAgnostic?: boolean; - multiNamespace?: boolean; - hidden?: boolean; - indexPattern?: ((config: LegacyConfig) => string) | string; - convertToAliasScript?: string; -} - -/** - * @deprecated - * @internal - **/ -export interface SavedObjectsSchemaDefinition { - [type: string]: SavedObjectsSchemaTypeDefinition; -} - -/** - * @deprecated This is only used by the {@link SavedObjectsLegacyService | legacy savedObjects service} - * @internal - **/ -export class SavedObjectsSchema { - private readonly definition?: SavedObjectsSchemaDefinition; - constructor(schemaDefinition?: SavedObjectsSchemaDefinition) { - this.definition = schemaDefinition; - } - - public isHiddenType(type: string) { - if (this.definition && this.definition.hasOwnProperty(type)) { - return Boolean(this.definition[type].hidden); - } - - return false; - } - - public getIndexForType(config: LegacyConfig, type: string): string | undefined { - if (this.definition != null && this.definition.hasOwnProperty(type)) { - const { indexPattern } = this.definition[type]; - return typeof indexPattern === 'function' ? 
indexPattern(config) : indexPattern; - } else { - return undefined; - } - } - - public getConvertToAliasScript(type: string): string | undefined { - if (this.definition != null && this.definition.hasOwnProperty(type)) { - return this.definition[type].convertToAliasScript; - } - } - - public isNamespaceAgnostic(type: string) { - // if no plugins have registered a Saved Objects Schema, - // this.schema will be undefined, and no types are namespace agnostic - if (!this.definition) { - return false; - } - - const typeSchema = this.definition[type]; - if (!typeSchema) { - return false; - } - return Boolean(typeSchema.isNamespaceAgnostic); - } - - public isSingleNamespace(type: string) { - // if no plugins have registered a Saved Objects Schema, - // this.schema will be undefined, and all types are namespace isolated - if (!this.definition) { - return true; - } - - const typeSchema = this.definition[type]; - if (!typeSchema) { - return true; - } - return !Boolean(typeSchema.isNamespaceAgnostic) && !Boolean(typeSchema.multiNamespace); - } - - public isMultiNamespace(type: string) { - // if no plugins have registered a Saved Objects Schema, - // this.schema will be undefined, and no types are multi-namespace - if (!this.definition) { - return false; - } - - const typeSchema = this.definition[type]; - if (!typeSchema) { - return false; - } - return !Boolean(typeSchema.isNamespaceAgnostic) && Boolean(typeSchema.multiNamespace); - } -} diff --git a/src/core/server/saved_objects/service/index.ts b/src/core/server/saved_objects/service/index.ts index 9f625b4732e26..271d4dd67d43e 100644 --- a/src/core/server/saved_objects/service/index.ts +++ b/src/core/server/saved_objects/service/index.ts @@ -17,37 +17,6 @@ * under the License. */ -import { Readable } from 'stream'; -import { SavedObjectsClientProvider } from './lib'; -import { SavedObjectsClient } from './saved_objects_client'; -import { SavedObjectsExportOptions } from '../export'; -import { SavedObjectsImportOptions, SavedObjectsImportResponse } from '../import'; -import { SavedObjectsSchema } from '../schema'; -import { SavedObjectsResolveImportErrorsOptions } from '../import/types'; - -/** - * @internal - * @deprecated - */ -export interface SavedObjectsLegacyService { - // ATTENTION: these types are incomplete - addScopedSavedObjectsClientWrapperFactory: SavedObjectsClientProvider['addClientWrapperFactory']; - setScopedSavedObjectsClientFactory: SavedObjectsClientProvider['setClientFactory']; - getScopedSavedObjectsClient: SavedObjectsClientProvider['getClient']; - SavedObjectsClient: typeof SavedObjectsClient; - types: string[]; - schema: SavedObjectsSchema; - getSavedObjectsRepository(...rest: any[]): any; - importExport: { - objectLimit: number; - importSavedObjects(options: SavedObjectsImportOptions): Promise; - resolveImportErrors( - options: SavedObjectsResolveImportErrorsOptions - ): Promise; - getSortedObjectsForExport(options: SavedObjectsExportOptions): Promise; - }; -} - export { SavedObjectsRepository, SavedObjectsClientProvider, diff --git a/src/core/server/saved_objects/service/lib/filter_utils.test.ts b/src/core/server/saved_objects/service/lib/filter_utils.test.ts index 4d9bcdda3c8ae..60e8aa0afdda4 100644 --- a/src/core/server/saved_objects/service/lib/filter_utils.test.ts +++ b/src/core/server/saved_objects/service/lib/filter_utils.test.ts @@ -83,7 +83,19 @@ const mockMappings = { describe('Filter Utils', () => { describe('#validateConvertFilterToKueryNode', () => { - test('Validate a simple filter', () => { + test('Empty string 
filters are ignored', () => { + expect(validateConvertFilterToKueryNode(['foo'], '', mockMappings)).toBeUndefined(); + }); + test('Validate a simple KQL KueryNode filter', () => { + expect( + validateConvertFilterToKueryNode( + ['foo'], + esKuery.nodeTypes.function.buildNode('is', `foo.attributes.title`, 'best', true), + mockMappings + ) + ).toEqual(esKuery.fromKueryExpression('foo.title: "best"')); + }); + test('Validate a simple KQL expression filter', () => { expect( validateConvertFilterToKueryNode(['foo'], 'foo.attributes.title: "best"', mockMappings) ).toEqual(esKuery.fromKueryExpression('foo.title: "best"')); diff --git a/src/core/server/saved_objects/service/lib/filter_utils.ts b/src/core/server/saved_objects/service/lib/filter_utils.ts index 5fbe62a074b29..d19f06d74e419 100644 --- a/src/core/server/saved_objects/service/lib/filter_utils.ts +++ b/src/core/server/saved_objects/service/lib/filter_utils.ts @@ -28,11 +28,12 @@ const astFunctionType = ['is', 'range', 'nested']; export const validateConvertFilterToKueryNode = ( allowedTypes: string[], - filter: string, + filter: string | KueryNode, indexMapping: IndexMapping ): KueryNode | undefined => { - if (filter && filter.length > 0 && indexMapping) { - const filterKueryNode = esKuery.fromKueryExpression(filter); + if (filter && indexMapping) { + const filterKueryNode = + typeof filter === 'string' ? esKuery.fromKueryExpression(filter) : filter; const validationFilterKuery = validateFilterKueryNode({ astFilter: filterKueryNode, diff --git a/src/core/server/saved_objects/service/lib/repository.test.js b/src/core/server/saved_objects/service/lib/repository.test.js index 39433981dfd59..f2e3b3e633cd6 100644 --- a/src/core/server/saved_objects/service/lib/repository.test.js +++ b/src/core/server/saved_objects/service/lib/repository.test.js @@ -25,6 +25,8 @@ import { encodeHitVersion } from '../../version'; import { SavedObjectTypeRegistry } from '../../saved_objects_type_registry'; import { DocumentMigrator } from '../../migrations/core/document_migrator'; import { elasticsearchClientMock } from '../../../elasticsearch/client/mocks'; +// eslint-disable-next-line @kbn/eslint/no-restricted-paths +import { nodeTypes } from '../../../../../plugins/data/common/es_query'; jest.mock('./search_dsl/search_dsl', () => ({ getSearchDsl: jest.fn() })); @@ -151,7 +153,6 @@ describe('SavedObjectsRepository', () => { typeRegistry: registry, kibanaVersion: '2.0.0', log: {}, - validateDoc: jest.fn(), }); const getMockGetResponse = ({ type, id, references, namespace, originId }) => ({ @@ -2529,7 +2530,7 @@ describe('SavedObjectsRepository', () => { expect(getSearchDslNS.getSearchDsl).toHaveBeenCalledWith(mappings, registry, relevantOpts); }); - it(`accepts KQL filter and passes kueryNode to getSearchDsl`, async () => { + it(`accepts KQL expression filter and passes KueryNode to getSearchDsl`, async () => { const findOpts = { namespace, search: 'foo*', @@ -2570,6 +2571,47 @@ describe('SavedObjectsRepository', () => { `); }); + it(`accepts KQL KueryNode filter and passes KueryNode to getSearchDsl`, async () => { + const findOpts = { + namespace, + search: 'foo*', + searchFields: ['foo'], + type: ['dashboard'], + sortField: 'name', + sortOrder: 'desc', + defaultSearchOperator: 'AND', + hasReference: { + type: 'foo', + id: '1', + }, + indexPattern: undefined, + filter: nodeTypes.function.buildNode('is', `dashboard.attributes.otherField`, '*'), + }; + + await findSuccess(findOpts, namespace); + const { kueryNode } = getSearchDslNS.getSearchDsl.mock.calls[0][2]; 
+ expect(kueryNode).toMatchInlineSnapshot(` + Object { + "arguments": Array [ + Object { + "type": "literal", + "value": "dashboard.otherField", + }, + Object { + "type": "wildcard", + "value": "@kuery-wildcard@", + }, + Object { + "type": "literal", + "value": false, + }, + ], + "function": "is", + "type": "function", + } + `); + }); + it(`supports multiple types`, async () => { const types = ['config', 'index-pattern']; await findSuccess({ type: types }); diff --git a/src/core/server/saved_objects/service/lib/repository.ts b/src/core/server/saved_objects/service/lib/repository.ts index dd25989725f3e..e3fb7d2306469 100644 --- a/src/core/server/saved_objects/service/lib/repository.ts +++ b/src/core/server/saved_objects/service/lib/repository.ts @@ -31,7 +31,7 @@ import { getSearchDsl } from './search_dsl'; import { includedFields } from './included_fields'; import { SavedObjectsErrorHelpers, DecoratedError } from './errors'; import { decodeRequestVersion, encodeVersion, encodeHitVersion } from '../../version'; -import { KibanaMigrator } from '../../migrations'; +import { IKibanaMigrator } from '../../migrations'; import { SavedObjectsSerializer, SavedObjectSanitizedDoc, @@ -85,7 +85,7 @@ export interface SavedObjectsRepositoryOptions { client: ElasticsearchClient; typeRegistry: SavedObjectTypeRegistry; serializer: SavedObjectsSerializer; - migrator: KibanaMigrator; + migrator: IKibanaMigrator; allowedTypes: string[]; } @@ -120,7 +120,7 @@ export type ISavedObjectsRepository = Pick) => { path: string; uiCapabilitiesPath: string }; } - -/** - * @internal - * @deprecated - */ -export interface SavedObjectsLegacyUiExports { - savedObjectMappings: SavedObjectsLegacyMapping[]; - savedObjectMigrations: SavedObjectsLegacyMigrationDefinitions; - savedObjectSchemas: SavedObjectsLegacySchemaDefinitions; - savedObjectValidations: PropertyValidators; - savedObjectsManagement: SavedObjectsLegacyManagementDefinition; -} - -/** - * @internal - * @deprecated - */ -export interface SavedObjectsLegacyMapping { - pluginId: string; - properties: SavedObjectsTypeMappingDefinitions; -} - -/** - * @internal - * @deprecated Use {@link SavedObjectsTypeManagementDefinition | management definition} when registering - * from new platform plugins - */ -export interface SavedObjectsLegacyManagementDefinition { - [key: string]: SavedObjectsLegacyManagementTypeDefinition; -} - -/** - * @internal - * @deprecated - */ -export interface SavedObjectsLegacyManagementTypeDefinition { - isImportableAndExportable?: boolean; - defaultSearchField?: string; - icon?: string; - getTitle?: (savedObject: SavedObject) => string; - getEditUrl?: (savedObject: SavedObject) => string; - getInAppUrl?: (savedObject: SavedObject) => { path: string; uiCapabilitiesPath: string }; -} - -/** - * @internal - * @deprecated - */ -export interface SavedObjectsLegacyMigrationDefinitions { - [type: string]: SavedObjectLegacyMigrationMap; -} - -/** - * @internal - * @deprecated - */ -export interface SavedObjectLegacyMigrationMap { - [version: string]: SavedObjectLegacyMigrationFn; -} - -/** - * @internal - * @deprecated - */ -export type SavedObjectLegacyMigrationFn = ( - doc: SavedObjectUnsanitizedDoc, - log: SavedObjectsMigrationLogger -) => SavedObjectUnsanitizedDoc; - -/** - * @internal - * @deprecated - */ -interface SavedObjectsLegacyTypeSchema { - isNamespaceAgnostic?: boolean; - /** Cannot be used in conjunction with `isNamespaceAgnostic` */ - multiNamespace?: boolean; - hidden?: boolean; - indexPattern?: ((config: LegacyConfig) => string) | 
string; - convertToAliasScript?: string; -} - -/** - * @internal - * @deprecated - */ -export interface SavedObjectsLegacySchemaDefinitions { - [type: string]: SavedObjectsLegacyTypeSchema; -} diff --git a/src/core/server/saved_objects/utils.test.ts b/src/core/server/saved_objects/utils.test.ts deleted file mode 100644 index 21229bee489c2..0000000000000 --- a/src/core/server/saved_objects/utils.test.ts +++ /dev/null @@ -1,445 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { legacyServiceMock } from '../legacy/legacy_service.mock'; -import { convertLegacyTypes, convertTypesToLegacySchema } from './utils'; -import { SavedObjectsLegacyUiExports, SavedObjectsType } from './types'; -import { LegacyConfig, SavedObjectMigrationContext } from 'kibana/server'; -import { SavedObjectUnsanitizedDoc } from './serialization'; - -describe('convertLegacyTypes', () => { - let legacyConfig: ReturnType; - - beforeEach(() => { - legacyConfig = legacyServiceMock.createLegacyConfig(); - }); - - it('converts the legacy mappings using default values if no schemas are specified', () => { - const uiExports: SavedObjectsLegacyUiExports = { - savedObjectMappings: [ - { - pluginId: 'pluginA', - properties: { - typeA: { - properties: { - fieldA: { type: 'text' }, - }, - }, - typeB: { - properties: { - fieldB: { type: 'text' }, - }, - }, - }, - }, - { - pluginId: 'pluginB', - properties: { - typeC: { - properties: { - fieldC: { type: 'text' }, - }, - }, - }, - }, - ], - savedObjectMigrations: {}, - savedObjectSchemas: {}, - savedObjectValidations: {}, - savedObjectsManagement: {}, - }; - - const converted = convertLegacyTypes(uiExports, legacyConfig); - expect(converted).toMatchSnapshot(); - }); - - it('merges the mappings and the schema to create the type when schema exists for the type', () => { - const uiExports: SavedObjectsLegacyUiExports = { - savedObjectMappings: [ - { - pluginId: 'pluginA', - properties: { - typeA: { - properties: { - fieldA: { type: 'text' }, - }, - }, - }, - }, - { - pluginId: 'pluginB', - properties: { - typeB: { - properties: { - fieldB: { type: 'text' }, - }, - }, - }, - }, - { - pluginId: 'pluginC', - properties: { - typeC: { - properties: { - fieldC: { type: 'text' }, - }, - }, - }, - }, - { - pluginId: 'pluginD', - properties: { - typeD: { - properties: { - fieldD: { type: 'text' }, - }, - }, - }, - }, - ], - savedObjectMigrations: {}, - savedObjectSchemas: { - typeA: { - indexPattern: 'fooBar', - hidden: true, - isNamespaceAgnostic: true, - }, - typeB: { - indexPattern: 'barBaz', - hidden: false, - multiNamespace: true, - }, - typeD: { - indexPattern: 'bazQux', - hidden: false, - // if both isNamespaceAgnostic and multiNamespace are true, the resulting namespaceType is 'agnostic' - isNamespaceAgnostic: true, - 
multiNamespace: true, - }, - }, - savedObjectValidations: {}, - savedObjectsManagement: {}, - }; - - const converted = convertLegacyTypes(uiExports, legacyConfig); - expect(converted).toMatchSnapshot(); - }); - - it('invokes indexPattern to retrieve the index when it is a function', () => { - const indexPatternAccessor: (config: LegacyConfig) => string = jest.fn((config) => { - config.get('foo.bar'); - return 'myIndex'; - }); - - const uiExports: SavedObjectsLegacyUiExports = { - savedObjectMappings: [ - { - pluginId: 'pluginA', - properties: { - typeA: { - properties: { - fieldA: { type: 'text' }, - }, - }, - }, - }, - ], - savedObjectMigrations: {}, - savedObjectSchemas: { - typeA: { - indexPattern: indexPatternAccessor, - hidden: true, - isNamespaceAgnostic: true, - }, - }, - savedObjectValidations: {}, - savedObjectsManagement: {}, - }; - - const converted = convertLegacyTypes(uiExports, legacyConfig); - - expect(indexPatternAccessor).toHaveBeenCalledWith(legacyConfig); - expect(legacyConfig.get).toHaveBeenCalledWith('foo.bar'); - expect(converted.length).toEqual(1); - expect(converted[0].indexPattern).toEqual('myIndex'); - }); - - it('import migrations from the uiExports', () => { - const migrationsA = { - '1.0.0': jest.fn(), - '2.0.4': jest.fn(), - }; - const migrationsB = { - '1.5.3': jest.fn(), - }; - - const uiExports: SavedObjectsLegacyUiExports = { - savedObjectMappings: [ - { - pluginId: 'pluginA', - properties: { - typeA: { - properties: { - fieldA: { type: 'text' }, - }, - }, - }, - }, - { - pluginId: 'pluginB', - properties: { - typeB: { - properties: { - fieldC: { type: 'text' }, - }, - }, - }, - }, - ], - savedObjectMigrations: { - typeA: migrationsA, - typeB: migrationsB, - }, - savedObjectSchemas: {}, - savedObjectValidations: {}, - savedObjectsManagement: {}, - }; - - const converted = convertLegacyTypes(uiExports, legacyConfig); - expect(converted.length).toEqual(2); - expect(Object.keys(converted[0]!.migrations!)).toEqual(Object.keys(migrationsA)); - expect(Object.keys(converted[1]!.migrations!)).toEqual(Object.keys(migrationsB)); - }); - - it('converts the migration to the new format', () => { - const legacyMigration = jest.fn(); - const migrationsA = { - '1.0.0': legacyMigration, - }; - - const uiExports: SavedObjectsLegacyUiExports = { - savedObjectMappings: [ - { - pluginId: 'pluginA', - properties: { - typeA: { - properties: { - fieldA: { type: 'text' }, - }, - }, - }, - }, - ], - savedObjectMigrations: { - typeA: migrationsA, - }, - savedObjectSchemas: {}, - savedObjectValidations: {}, - savedObjectsManagement: {}, - }; - - const converted = convertLegacyTypes(uiExports, legacyConfig); - expect(Object.keys(converted[0]!.migrations!)).toEqual(['1.0.0']); - - const migration = converted[0]!.migrations!['1.0.0']!; - - const doc = {} as SavedObjectUnsanitizedDoc; - const context = { log: {} } as SavedObjectMigrationContext; - migration(doc, context); - - expect(legacyMigration).toHaveBeenCalledTimes(1); - expect(legacyMigration).toHaveBeenCalledWith(doc, context.log); - }); - - it('imports type management information', () => { - const uiExports: SavedObjectsLegacyUiExports = { - savedObjectMappings: [ - { - pluginId: 'pluginA', - properties: { - typeA: { - properties: { - fieldA: { type: 'text' }, - }, - }, - }, - }, - { - pluginId: 'pluginB', - properties: { - typeB: { - properties: { - fieldB: { type: 'text' }, - }, - }, - typeC: { - properties: { - fieldC: { type: 'text' }, - }, - }, - }, - }, - ], - savedObjectsManagement: { - typeA: { - 
isImportableAndExportable: true, - icon: 'iconA', - defaultSearchField: 'searchFieldA', - getTitle: (savedObject) => savedObject.id, - }, - typeB: { - isImportableAndExportable: false, - icon: 'iconB', - getEditUrl: (savedObject) => `/some-url/${savedObject.id}`, - getInAppUrl: (savedObject) => ({ path: 'path', uiCapabilitiesPath: 'ui-path' }), - }, - }, - savedObjectMigrations: {}, - savedObjectSchemas: {}, - savedObjectValidations: {}, - }; - - const converted = convertLegacyTypes(uiExports, legacyConfig); - expect(converted.length).toEqual(3); - const [typeA, typeB, typeC] = converted; - - expect(typeA.management).toEqual({ - importableAndExportable: true, - icon: 'iconA', - defaultSearchField: 'searchFieldA', - getTitle: uiExports.savedObjectsManagement.typeA.getTitle, - }); - - expect(typeB.management).toEqual({ - importableAndExportable: false, - icon: 'iconB', - getEditUrl: uiExports.savedObjectsManagement.typeB.getEditUrl, - getInAppUrl: uiExports.savedObjectsManagement.typeB.getInAppUrl, - }); - - expect(typeC.management).toBeUndefined(); - }); - - it('merges everything when all are present', () => { - const uiExports: SavedObjectsLegacyUiExports = { - savedObjectMappings: [ - { - pluginId: 'pluginA', - properties: { - typeA: { - properties: { - fieldA: { type: 'text' }, - }, - }, - typeB: { - properties: { - fieldB: { type: 'text' }, - anotherFieldB: { type: 'boolean' }, - }, - }, - }, - }, - { - pluginId: 'pluginB', - properties: { - typeC: { - properties: { - fieldC: { type: 'text' }, - }, - }, - }, - }, - ], - savedObjectMigrations: { - typeA: { - '1.0.0': jest.fn(), - '2.0.4': jest.fn(), - }, - typeC: { - '1.5.3': jest.fn(), - }, - }, - savedObjectSchemas: { - typeA: { - indexPattern: jest.fn((config) => { - config.get('foo.bar'); - return 'myIndex'; - }), - hidden: true, - isNamespaceAgnostic: true, - }, - typeB: { - convertToAliasScript: 'some alias script', - hidden: false, - }, - }, - savedObjectValidations: {}, - savedObjectsManagement: {}, - }; - - const converted = convertLegacyTypes(uiExports, legacyConfig); - expect(converted).toMatchSnapshot(); - }); -}); - -describe('convertTypesToLegacySchema', () => { - it('converts types to the legacy schema format', () => { - const types: SavedObjectsType[] = [ - { - name: 'typeA', - hidden: false, - namespaceType: 'agnostic', - mappings: { properties: {} }, - convertToAliasScript: 'some script', - }, - { - name: 'typeB', - hidden: true, - namespaceType: 'single', - indexPattern: 'myIndex', - mappings: { properties: {} }, - }, - { - name: 'typeC', - hidden: false, - namespaceType: 'multiple', - mappings: { properties: {} }, - }, - ]; - expect(convertTypesToLegacySchema(types)).toEqual({ - typeA: { - hidden: false, - isNamespaceAgnostic: true, - multiNamespace: false, - convertToAliasScript: 'some script', - }, - typeB: { - hidden: true, - isNamespaceAgnostic: false, - multiNamespace: false, - indexPattern: 'myIndex', - }, - typeC: { - hidden: false, - isNamespaceAgnostic: false, - multiNamespace: true, - }, - }); - }); -}); diff --git a/src/core/server/saved_objects/utils.ts b/src/core/server/saved_objects/utils.ts deleted file mode 100644 index af7c08d1fbfcc..0000000000000 --- a/src/core/server/saved_objects/utils.ts +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { LegacyConfig } from '../legacy'; -import { SavedObjectMigrationMap } from './migrations'; -import { - SavedObjectsNamespaceType, - SavedObjectsType, - SavedObjectsLegacyUiExports, - SavedObjectLegacyMigrationMap, - SavedObjectsLegacyManagementTypeDefinition, - SavedObjectsTypeManagementDefinition, -} from './types'; -import { SavedObjectsSchemaDefinition } from './schema'; - -/** - * Converts the legacy savedObjects mappings, schema, and migrations - * to actual {@link SavedObjectsType | saved object types} - */ -export const convertLegacyTypes = ( - { - savedObjectMappings = [], - savedObjectMigrations = {}, - savedObjectSchemas = {}, - savedObjectsManagement = {}, - }: SavedObjectsLegacyUiExports, - legacyConfig: LegacyConfig -): SavedObjectsType[] => { - return savedObjectMappings.reduce((types, { properties }) => { - return [ - ...types, - ...Object.entries(properties).map(([type, mappings]) => { - const schema = savedObjectSchemas[type]; - const migrations = savedObjectMigrations[type]; - const management = savedObjectsManagement[type]; - const namespaceType = (schema?.isNamespaceAgnostic - ? 'agnostic' - : schema?.multiNamespace - ? 'multiple' - : 'single') as SavedObjectsNamespaceType; - return { - name: type, - hidden: schema?.hidden ?? false, - namespaceType, - mappings, - indexPattern: - typeof schema?.indexPattern === 'function' - ? schema.indexPattern(legacyConfig) - : schema?.indexPattern, - convertToAliasScript: schema?.convertToAliasScript, - migrations: convertLegacyMigrations(migrations ?? {}), - management: management ? 
convertLegacyTypeManagement(management) : undefined, - }; - }), - ]; - }, [] as SavedObjectsType[]); -}; - -/** - * Convert {@link SavedObjectsType | saved object types} to the legacy {@link SavedObjectsSchemaDefinition | schema} format - */ -export const convertTypesToLegacySchema = ( - types: SavedObjectsType[] -): SavedObjectsSchemaDefinition => { - return types.reduce((schema, type) => { - return { - ...schema, - [type.name]: { - isNamespaceAgnostic: type.namespaceType === 'agnostic', - multiNamespace: type.namespaceType === 'multiple', - hidden: type.hidden, - indexPattern: type.indexPattern, - convertToAliasScript: type.convertToAliasScript, - }, - }; - }, {} as SavedObjectsSchemaDefinition); -}; - -const convertLegacyMigrations = ( - legacyMigrations: SavedObjectLegacyMigrationMap -): SavedObjectMigrationMap => { - return Object.entries(legacyMigrations).reduce((migrated, [version, migrationFn]) => { - return { - ...migrated, - [version]: (doc, context) => migrationFn(doc, context.log), - }; - }, {} as SavedObjectMigrationMap); -}; - -const convertLegacyTypeManagement = ( - legacyTypeManagement: SavedObjectsLegacyManagementTypeDefinition -): SavedObjectsTypeManagementDefinition => { - return { - importableAndExportable: legacyTypeManagement.isImportableAndExportable, - defaultSearchField: legacyTypeManagement.defaultSearchField, - icon: legacyTypeManagement.icon, - getTitle: legacyTypeManagement.getTitle, - getEditUrl: legacyTypeManagement.getEditUrl, - getInAppUrl: legacyTypeManagement.getInAppUrl, - }; -}; diff --git a/src/core/server/saved_objects/validation/index.ts b/src/core/server/saved_objects/validation/index.ts deleted file mode 100644 index b1b33f91d3fd4..0000000000000 --- a/src/core/server/saved_objects/validation/index.ts +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * This is the core logic for validating saved object properties. The saved object client - * and migrations consume this in order to validate saved object documents prior to - * persisting them. - */ - -interface SavedObjectDoc { - type: string; - [prop: string]: any; -} - -/** - * A dictionary of property name -> validation function. The property name - * is generally the document's type (e.g. "dashboard"), but will also - * match other properties. - * - * For example, the "acl" and "dashboard" validators both apply to the - * following saved object: { type: "dashboard", attributes: {}, acl: "sdlaj3w" } - * - * @export - * @interface Validators - */ -export interface PropertyValidators { - [prop: string]: ValidateDoc; -} - -export type ValidateDoc = (doc: SavedObjectDoc) => void; - -/** - * Creates a function which uses a dictionary of property validators to validate - * individual saved object documents. 
- * - * @export - * @param {Validators} validators - * @param {SavedObjectDoc} doc - */ -export function docValidator(validators: PropertyValidators = {}): ValidateDoc { - return function validateDoc(doc: SavedObjectDoc) { - Object.keys(doc) - .concat(doc.type) - .forEach((prop) => { - const validator = validators[prop]; - if (validator) { - validator(doc); - } - }); - }; -} diff --git a/src/core/server/saved_objects/validation/readme.md b/src/core/server/saved_objects/validation/readme.md deleted file mode 100644 index 3b9f17c37fd0b..0000000000000 --- a/src/core/server/saved_objects/validation/readme.md +++ /dev/null @@ -1,63 +0,0 @@ -# Saved Object Validations - -The saved object client supports validation of documents during create / bulkCreate operations. - -This allows us tighter control over what documents get written to the saved object index, and helps us keep the index in a healthy state. - -## Creating validations - -Plugin authors can write their own validations by adding a `validations` property to their uiExports. A validation is nothing more than a dictionary of `{[prop: string]: validationFunction}` where: - -* `prop` - a root-property on a saved object document -* `validationFunction` - a function that takes a document and throws an error if it does not meet expectations. - -## Example - -```js -// In myFanciPlugin... -uiExports: { - validations: { - myProperty(doc) { - if (doc.attributes.someField === undefined) { - throw new Error(`Document ${doc.id} did not define "someField"`); - } - }, - - someOtherProp(doc) { - if (doc.attributes.counter < 0) { - throw new Error(`Document ${doc.id} cannot have a negative counter.`); - } - }, - }, -}, -``` - -In this example, `myFanciPlugin` defines validations for two properties: `myProperty` and `someOtherProp`. - -This means that no other plugin can define validations for myProperty or someOtherProp. - -The `myProperty` validation would run for any doc that has a `type="myProperty"` or for any doc that has a root-level property of `myProperty`. e.g. it would apply to all documents in the following array: - -```js -[ - { - type: 'foo', - attributes: { stuff: 'here' }, - myProperty: 'shazm!', - }, - { - type: 'myProperty', - attributes: { shazm: true }, - }, -]; -``` - -Validating properties other than just 'type' allows us to support potential future saved object scenarios in which plugins might want to annotate other plugin documents, such as a security plugin adding an acl to another document: - -```js -{ - type: 'dashboard', - attributes: { stuff: 'here' }, - acl: '342343', -} -``` diff --git a/src/core/server/saved_objects/validation/validation.test.ts b/src/core/server/saved_objects/validation/validation.test.ts deleted file mode 100644 index 71e220280ba5f..0000000000000 --- a/src/core/server/saved_objects/validation/validation.test.ts +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { docValidator } from './index'; - -describe('docValidator', () => { - test('does not run validators that have no application to the doc', () => { - const validators = { - foo: () => { - throw new Error('Boom!'); - }, - }; - expect(() => docValidator(validators)({ type: 'shoo', bar: 'hi' })).not.toThrow(); - }); - - test('validates the doc type', () => { - const validators = { - foo: () => { - throw new Error('Boom!'); - }, - }; - expect(() => docValidator(validators)({ type: 'foo' })).toThrow(/Boom!/); - }); - - test('validates various props', () => { - const validators = { - a: jest.fn(), - b: jest.fn(), - c: jest.fn(), - }; - docValidator(validators)({ type: 'a', b: 'foo' }); - - expect(validators.c).not.toHaveBeenCalled(); - - expect(validators.a.mock.calls).toEqual([[{ type: 'a', b: 'foo' }]]); - expect(validators.b.mock.calls).toEqual([[{ type: 'a', b: 'foo' }]]); - }); -}); diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md index 49c97d837579d..aef1bda9ccf4e 100644 --- a/src/core/server/server.api.md +++ b/src/core/server/server.api.md @@ -40,6 +40,7 @@ import { DeleteScriptParams } from 'elasticsearch'; import { DeleteTemplateParams } from 'elasticsearch'; import { DetailedPeerCertificate } from 'tls'; import { Duration } from 'moment'; +import { ErrorToastOptions } from 'src/core/public/notifications'; import { ExistsParams } from 'elasticsearch'; import { ExplainParams } from 'elasticsearch'; import { FieldStatsParams } from 'elasticsearch'; @@ -118,6 +119,7 @@ import { RenderSearchTemplateParams } from 'elasticsearch'; import { Request } from 'hapi'; import { ResponseObject } from 'hapi'; import { ResponseToolkit } from 'hapi'; +import { SavedObject as SavedObject_2 } from 'src/core/server'; import { SchemaTypeError } from '@kbn/config-schema'; import { ScrollParams } from 'elasticsearch'; import { SearchParams } from 'elasticsearch'; @@ -141,6 +143,7 @@ import { TasksCancelParams } from 'elasticsearch'; import { TasksGetParams } from 'elasticsearch'; import { TasksListParams } from 'elasticsearch'; import { TermvectorsParams } from 'elasticsearch'; +import { ToastInputFields } from 'src/core/public/notifications'; import { TransportRequestOptions } from '@elastic/elasticsearch/lib/Transport'; import { TransportRequestParams } from '@elastic/elasticsearch/lib/Transport'; import { TransportRequestPromise } from '@elastic/elasticsearch/lib/Transport'; @@ -150,10 +153,12 @@ import { UpdateDocumentByQueryParams } from 'elasticsearch'; import { UpdateDocumentParams } from 'elasticsearch'; import { Url } from 'url'; -// Warning: (ae-forgotten-export) The symbol "appendersSchema" needs to be exported by the entry point index.d.ts +// Warning: (ae-forgotten-export) The symbol "ConsoleAppenderConfig" needs to be exported by the entry point index.d.ts +// Warning: (ae-forgotten-export) The symbol "FileAppenderConfig" needs to be exported by the entry point index.d.ts +// Warning: (ae-forgotten-export) The symbol "LegacyAppenderConfig" needs to be exported by the entry point index.d.ts // // @public (undocumented) -export type 
AppenderConfigType = TypeOf; +export type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig; // @public export function assertNever(x: never): never; @@ -322,108 +327,45 @@ export type CapabilitiesSwitcher = (request: KibanaRequest, uiCapabilities: Capa export const config: { elasticsearch: { schema: import("@kbn/config-schema").ObjectType<{ - sniffOnStart: import("@kbn/config-schema").Type; - sniffInterval: import("@kbn/config-schema").Type; - sniffOnConnectionFault: import("@kbn/config-schema").Type; - hosts: import("@kbn/config-schema").Type; - preserveHost: import("@kbn/config-schema").Type; - username: import("@kbn/config-schema").Type; - password: import("@kbn/config-schema").Type; - requestHeadersWhitelist: import("@kbn/config-schema").Type; - customHeaders: import("@kbn/config-schema").Type>; - shardTimeout: import("@kbn/config-schema").Type; - requestTimeout: import("@kbn/config-schema").Type; - pingTimeout: import("@kbn/config-schema").Type; - startupTimeout: import("@kbn/config-schema").Type; - logQueries: import("@kbn/config-schema").Type; + sniffOnStart: Type; + sniffInterval: Type; + sniffOnConnectionFault: Type; + hosts: Type; + preserveHost: Type; + username: Type; + password: Type; + requestHeadersWhitelist: Type; + customHeaders: Type>; + shardTimeout: Type; + requestTimeout: Type; + pingTimeout: Type; + startupTimeout: Type; + logQueries: Type; ssl: import("@kbn/config-schema").ObjectType<{ - verificationMode: import("@kbn/config-schema").Type<"none" | "certificate" | "full">; - certificateAuthorities: import("@kbn/config-schema").Type; - certificate: import("@kbn/config-schema").Type; - key: import("@kbn/config-schema").Type; - keyPassphrase: import("@kbn/config-schema").Type; + verificationMode: Type<"none" | "certificate" | "full">; + certificateAuthorities: Type; + certificate: Type; + key: Type; + keyPassphrase: Type; keystore: import("@kbn/config-schema").ObjectType<{ - path: import("@kbn/config-schema").Type; - password: import("@kbn/config-schema").Type; + path: Type; + password: Type; }>; truststore: import("@kbn/config-schema").ObjectType<{ - path: import("@kbn/config-schema").Type; - password: import("@kbn/config-schema").Type; + path: Type; + password: Type; }>; - alwaysPresentCertificate: import("@kbn/config-schema").Type; + alwaysPresentCertificate: Type; }>; - apiVersion: import("@kbn/config-schema").Type; + apiVersion: Type; healthCheck: import("@kbn/config-schema").ObjectType<{ - delay: import("@kbn/config-schema").Type; + delay: Type; }>; ignoreVersionMismatch: import("@kbn/config-schema/target/types/types").ConditionalType; }>; }; logging: { - appenders: import("@kbn/config-schema").Type | Readonly<{ - pattern?: string | undefined; - highlight?: boolean | undefined; - } & { - kind: "pattern"; - }>; - kind: "console"; - }> | Readonly<{} & { - path: string; - layout: Readonly<{} & { - kind: "json"; - }> | Readonly<{ - pattern?: string | undefined; - highlight?: boolean | undefined; - } & { - kind: "pattern"; - }>; - kind: "file"; - }> | Readonly<{ - legacyLoggingConfig?: any; - } & { - kind: "legacy-appender"; - }>>; - loggers: import("@kbn/config-schema").ObjectType<{ - appenders: import("@kbn/config-schema").Type; - context: import("@kbn/config-schema").Type; - level: import("@kbn/config-schema").Type; - }>; - loggerContext: import("@kbn/config-schema").ObjectType<{ - appenders: import("@kbn/config-schema").Type | Readonly<{ - pattern?: string | undefined; - highlight?: boolean | undefined; - } & { - kind: 
"pattern"; - }>; - kind: "console"; - }> | Readonly<{} & { - path: string; - layout: Readonly<{} & { - kind: "json"; - }> | Readonly<{ - pattern?: string | undefined; - highlight?: boolean | undefined; - } & { - kind: "pattern"; - }>; - kind: "file"; - }> | Readonly<{ - legacyLoggingConfig?: any; - } & { - kind: "legacy-appender"; - }>>>; - loggers: import("@kbn/config-schema").Type[]>; - }>; + appenders: Type; }; }; @@ -1469,19 +1411,30 @@ export interface LegacyServiceStartDeps { plugins: Record; } -// Warning: (ae-forgotten-export) The symbol "SavedObjectsLegacyUiExports" needs to be exported by the entry point index.d.ts -// // @internal @deprecated (undocumented) -export type LegacyUiExports = SavedObjectsLegacyUiExports & { +export interface LegacyUiExports { + // Warning: (ae-forgotten-export) The symbol "VarsProvider" needs to be exported by the entry point index.d.ts + // + // (undocumented) defaultInjectedVarProviders?: VarsProvider[]; + // Warning: (ae-forgotten-export) The symbol "VarsReplacer" needs to be exported by the entry point index.d.ts + // + // (undocumented) injectedVarsReplacers?: VarsReplacer[]; + // Warning: (ae-forgotten-export) The symbol "LegacyNavLinkSpec" needs to be exported by the entry point index.d.ts + // + // (undocumented) navLinkSpecs?: LegacyNavLinkSpec[] | null; + // Warning: (ae-forgotten-export) The symbol "LegacyAppSpec" needs to be exported by the entry point index.d.ts + // + // (undocumented) uiAppSpecs?: Array; + // (undocumented) unknown?: [{ pluginSpec: LegacyPluginSpec; type: unknown; }]; -}; +} // Warning: (ae-forgotten-export) The symbol "lifecycleResponseFactory" needs to be exported by the entry point index.d.ts // @@ -1578,10 +1531,10 @@ export interface LogRecord { timestamp: Date; } -// Warning: (ae-missing-release-tag) "MetricsServiceSetup" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) -// -// @public (undocumented) +// @public export interface MetricsServiceSetup { + readonly collectionInterval: number; + getOpsMetrics$: () => Observable; } // @public @deprecated (undocumented) @@ -1668,6 +1621,7 @@ export interface OnPreRoutingToolkit { // @public export interface OpsMetrics { + collected_at: Date; concurrent_connections: OpsServerMetrics['concurrent_connections']; os: OpsOsMetrics; process: OpsProcessMetrics; @@ -1677,6 +1631,20 @@ export interface OpsMetrics { // @public export interface OpsOsMetrics { + cpu?: { + control_group: string; + cfs_period_micros: number; + cfs_quota_micros: number; + stat: { + number_of_elapsed_periods: number; + number_of_times_throttled: number; + time_throttled_nanos: number; + }; + }; + cpuacct?: { + control_group: string; + usage_nanos: number; + }; distro?: string; distroRelease?: string; load: { @@ -2320,8 +2288,10 @@ export interface SavedObjectsFindOptions { // (undocumented) defaultSearchOperator?: 'AND' | 'OR'; fields?: string[]; + // Warning: (ae-forgotten-export) The symbol "KueryNode" needs to be exported by the entry point index.d.ts + // // (undocumented) - filter?: string; + filter?: string | KueryNode; // (undocumented) hasReference?: { type: string; @@ -2493,33 +2463,6 @@ export interface SavedObjectsIncrementCounterOptions extends SavedObjectsBaseOpt refresh?: MutatingOperationRefreshSetting; } -// @internal @deprecated (undocumented) -export interface SavedObjectsLegacyService { - // Warning: (ae-forgotten-export) The symbol "SavedObjectsClientProvider" needs to be exported by the entry point index.d.ts - // - // (undocumented) 
- addScopedSavedObjectsClientWrapperFactory: SavedObjectsClientProvider['addClientWrapperFactory']; - // (undocumented) - getSavedObjectsRepository(...rest: any[]): any; - // (undocumented) - getScopedSavedObjectsClient: SavedObjectsClientProvider['getClient']; - // (undocumented) - importExport: { - objectLimit: number; - importSavedObjects(options: SavedObjectsImportOptions): Promise; - resolveImportErrors(options: SavedObjectsResolveImportErrorsOptions): Promise; - getSortedObjectsForExport(options: SavedObjectsExportOptions): Promise; - }; - // (undocumented) - SavedObjectsClient: typeof SavedObjectsClient; - // (undocumented) - schema: SavedObjectsSchema; - // (undocumented) - setScopedSavedObjectsClientFactory: SavedObjectsClientProvider['setClientFactory']; - // (undocumented) - types: string[]; -} - // @public export interface SavedObjectsMappingProperties { // (undocumented) @@ -2573,10 +2516,10 @@ export class SavedObjectsRepository { bulkUpdate(objects: Array>, options?: SavedObjectsBulkUpdateOptions): Promise>; checkConflicts(objects?: SavedObjectsCheckConflictsObject[], options?: SavedObjectsBaseOptions): Promise; create(type: string, attributes: T, options?: SavedObjectsCreateOptions): Promise>; - // Warning: (ae-forgotten-export) The symbol "KibanaMigrator" needs to be exported by the entry point index.d.ts + // Warning: (ae-forgotten-export) The symbol "IKibanaMigrator" needs to be exported by the entry point index.d.ts // // @internal - static createRepository(migrator: KibanaMigrator, typeRegistry: SavedObjectTypeRegistry, indexName: string, client: ElasticsearchClient, includedHiddenTypes?: string[], injectedConstructor?: any): ISavedObjectsRepository; + static createRepository(migrator: IKibanaMigrator, typeRegistry: SavedObjectTypeRegistry, indexName: string, client: ElasticsearchClient, includedHiddenTypes?: string[], injectedConstructor?: any): ISavedObjectsRepository; delete(type: string, id: string, options?: SavedObjectsDeleteOptions): Promise<{}>; deleteByNamespace(namespace: string, options?: SavedObjectsDeleteByNamespaceOptions): Promise; deleteFromNamespaces(type: string, id: string, namespaces: string[], options?: SavedObjectsDeleteFromNamespacesOptions): Promise; @@ -2604,24 +2547,6 @@ export interface SavedObjectsResolveImportErrorsOptions { typeRegistry: ISavedObjectTypeRegistry; } -// @internal @deprecated (undocumented) -export class SavedObjectsSchema { - // Warning: (ae-forgotten-export) The symbol "SavedObjectsSchemaDefinition" needs to be exported by the entry point index.d.ts - constructor(schemaDefinition?: SavedObjectsSchemaDefinition); - // (undocumented) - getConvertToAliasScript(type: string): string | undefined; - // (undocumented) - getIndexForType(config: LegacyConfig, type: string): string | undefined; - // (undocumented) - isHiddenType(type: string): boolean; - // (undocumented) - isMultiNamespace(type: string): boolean; - // (undocumented) - isNamespaceAgnostic(type: string): boolean; - // (undocumented) - isSingleNamespace(type: string): boolean; -} - // @public export class SavedObjectsSerializer { // @internal @@ -2853,10 +2778,17 @@ export type SharedGlobalConfig = RecursiveReadonly<{ // @public export type StartServicesAccessor = () => Promise<[CoreStart, TPluginsStart, TStart]>; +// Warning: (ae-unresolved-link) The @link reference could not be resolved: The package "kibana" does not have an export "ServiceStatusSetup" +// Warning: (ae-unresolved-link) The @link reference could not be resolved: The package "kibana" does not have an 
export "ServiceStatusSetup" +// // @public export interface StatusServiceSetup { core$: Observable; + dependencies$: Observable>; + // Warning: (ae-unresolved-link) The @link reference could not be resolved: The package "kibana" does not have an export "StatusSetup" + derivedStatus$: Observable; overall$: Observable; + set(status$: Observable): void; } // @public @@ -2944,13 +2876,9 @@ export const validBodyOutput: readonly ["data", "stream"]; // Warnings were encountered during analysis: // // src/core/server/http/router/response.ts:316:3 - (ae-forgotten-export) The symbol "KibanaResponse" needs to be exported by the entry point index.d.ts -// src/core/server/legacy/types.ts:163:3 - (ae-forgotten-export) The symbol "VarsProvider" needs to be exported by the entry point index.d.ts -// src/core/server/legacy/types.ts:164:3 - (ae-forgotten-export) The symbol "VarsReplacer" needs to be exported by the entry point index.d.ts -// src/core/server/legacy/types.ts:165:3 - (ae-forgotten-export) The symbol "LegacyNavLinkSpec" needs to be exported by the entry point index.d.ts -// src/core/server/legacy/types.ts:166:3 - (ae-forgotten-export) The symbol "LegacyAppSpec" needs to be exported by the entry point index.d.ts -// src/core/server/legacy/types.ts:167:16 - (ae-forgotten-export) The symbol "LegacyPluginSpec" needs to be exported by the entry point index.d.ts -// src/core/server/plugins/types.ts:266:3 - (ae-forgotten-export) The symbol "KibanaConfigType" needs to be exported by the entry point index.d.ts -// src/core/server/plugins/types.ts:266:3 - (ae-forgotten-export) The symbol "SharedGlobalConfigKeys" needs to be exported by the entry point index.d.ts -// src/core/server/plugins/types.ts:268:3 - (ae-forgotten-export) The symbol "PathConfigType" needs to be exported by the entry point index.d.ts +// src/core/server/legacy/types.ts:135:16 - (ae-forgotten-export) The symbol "LegacyPluginSpec" needs to be exported by the entry point index.d.ts +// src/core/server/plugins/types.ts:272:3 - (ae-forgotten-export) The symbol "KibanaConfigType" needs to be exported by the entry point index.d.ts +// src/core/server/plugins/types.ts:272:3 - (ae-forgotten-export) The symbol "SharedGlobalConfigKeys" needs to be exported by the entry point index.d.ts +// src/core/server/plugins/types.ts:274:3 - (ae-forgotten-export) The symbol "PathConfigType" needs to be exported by the entry point index.d.ts ``` diff --git a/src/core/server/server.test.ts b/src/core/server/server.test.ts index 417f66a2988c2..8bf16d9130ef5 100644 --- a/src/core/server/server.test.ts +++ b/src/core/server/server.test.ts @@ -49,7 +49,7 @@ const rawConfigService = rawConfigServiceMock.create({}); beforeEach(() => { mockConfigService.atPath.mockReturnValue(new BehaviorSubject({ autoListen: true })); mockPluginsService.discover.mockResolvedValue({ - pluginTree: new Map(), + pluginTree: { asOpaqueIds: new Map(), asNames: new Map() }, uiPlugins: { internal: new Map(), public: new Map(), browserConfigs: new Map() }, }); }); @@ -98,7 +98,7 @@ test('injects legacy dependency to context#setup()', async () => { [pluginB, [pluginA]], ]); mockPluginsService.discover.mockResolvedValue({ - pluginTree: pluginDependencies, + pluginTree: { asOpaqueIds: pluginDependencies, asNames: new Map() }, uiPlugins: { internal: new Map(), public: new Map(), browserConfigs: new Map() }, }); diff --git a/src/core/server/server.ts b/src/core/server/server.ts index cc6d8171e7a03..a02b0f51b559f 100644 --- a/src/core/server/server.ts +++ b/src/core/server/server.ts @@ -121,10 
+121,13 @@ export class Server { const contextServiceSetup = this.context.setup({ // We inject a fake "legacy plugin" with dependencies on every plugin so that legacy plugins: - // 1) Can access context from any NP plugin + // 1) Can access context from any KP plugin // 2) Can register context providers that will only be available to other legacy plugins and will not leak into // New Platform plugins. - pluginDependencies: new Map([...pluginTree, [this.legacy.legacyId, [...pluginTree.keys()]]]), + pluginDependencies: new Map([ + ...pluginTree.asOpaqueIds, + [this.legacy.legacyId, [...pluginTree.asOpaqueIds.keys()]], + ]), }); const auditTrailSetup = this.auditTrail.setup(); @@ -142,7 +145,6 @@ export class Server { const savedObjectsSetup = await this.savedObjects.setup({ http: httpSetup, elasticsearch: elasticsearchServiceSetup, - legacyPlugins, }); const uiSettingsSetup = await this.uiSettings.setup({ @@ -154,6 +156,7 @@ export class Server { const statusSetup = await this.status.setup({ elasticsearch: elasticsearchServiceSetup, + pluginDependencies: pluginTree.asNames, savedObjects: savedObjectsSetup, }); diff --git a/src/core/server/status/get_summary_status.test.ts b/src/core/server/status/get_summary_status.test.ts index 7516e82ee784d..d97083162b502 100644 --- a/src/core/server/status/get_summary_status.test.ts +++ b/src/core/server/status/get_summary_status.test.ts @@ -94,6 +94,38 @@ describe('getSummaryStatus', () => { describe('summary', () => { describe('when a single service is at highest level', () => { it('returns all information about that single service', () => { + expect( + getSummaryStatus( + Object.entries({ + s1: degraded, + s2: { + level: ServiceStatusLevels.unavailable, + summary: 'Lorem ipsum', + meta: { + custom: { data: 'here' }, + }, + }, + }) + ) + ).toEqual({ + level: ServiceStatusLevels.unavailable, + summary: '[s2]: Lorem ipsum', + detail: 'See the status page for more information', + meta: { + affectedServices: { + s2: { + level: ServiceStatusLevels.unavailable, + summary: 'Lorem ipsum', + meta: { + custom: { data: 'here' }, + }, + }, + }, + }, + }); + }); + + it('allows the single service to override the detail and documentationUrl fields', () => { expect( getSummaryStatus( Object.entries({ @@ -115,7 +147,17 @@ describe('getSummaryStatus', () => { detail: 'Vivamus pulvinar sem ac luctus ultrices.', documentationUrl: 'http://helpmenow.com/problem1', meta: { - custom: { data: 'here' }, + affectedServices: { + s2: { + level: ServiceStatusLevels.unavailable, + summary: 'Lorem ipsum', + detail: 'Vivamus pulvinar sem ac luctus ultrices.', + documentationUrl: 'http://helpmenow.com/problem1', + meta: { + custom: { data: 'here' }, + }, + }, + }, }, }); }); diff --git a/src/core/server/status/get_summary_status.ts b/src/core/server/status/get_summary_status.ts index 748a54f0bf8bb..8d97cdbd9b15b 100644 --- a/src/core/server/status/get_summary_status.ts +++ b/src/core/server/status/get_summary_status.ts @@ -23,62 +23,60 @@ import { ServiceStatus, ServiceStatusLevels, ServiceStatusLevel } from './types' * Returns a single {@link ServiceStatus} that summarizes the most severe status level from a group of statuses. 
* @param statuses */ -export const getSummaryStatus = (statuses: Array<[string, ServiceStatus]>): ServiceStatus => { - const grouped = groupByLevel(statuses); - const highestSeverityLevel = getHighestSeverityLevel(grouped.keys()); - const highestSeverityGroup = grouped.get(highestSeverityLevel)!; +export const getSummaryStatus = ( + statuses: Array<[string, ServiceStatus]>, + { allAvailableSummary = `All services are available` }: { allAvailableSummary?: string } = {} +): ServiceStatus => { + const { highestLevel, highestStatuses } = highestLevelSummary(statuses); - if (highestSeverityLevel === ServiceStatusLevels.available) { + if (highestLevel === ServiceStatusLevels.available) { return { level: ServiceStatusLevels.available, - summary: `All services are available`, + summary: allAvailableSummary, }; - } else if (highestSeverityGroup.size === 1) { - const [serviceName, status] = [...highestSeverityGroup.entries()][0]; + } else if (highestStatuses.length === 1) { + const [serviceName, status] = highestStatuses[0]! as [string, ServiceStatus]; return { ...status, summary: `[${serviceName}]: ${status.summary!}`, + // TODO: include URL to status page + detail: status.detail ?? `See the status page for more information`, + meta: { + affectedServices: { [serviceName]: status }, + }, }; } else { return { - level: highestSeverityLevel, - summary: `[${highestSeverityGroup.size}] services are ${highestSeverityLevel.toString()}`, + level: highestLevel, + summary: `[${highestStatuses.length}] services are ${highestLevel.toString()}`, // TODO: include URL to status page detail: `See the status page for more information`, meta: { - affectedServices: Object.fromEntries([...highestSeverityGroup]), + affectedServices: Object.fromEntries(highestStatuses), }, }; } }; -const groupByLevel = ( - statuses: Array<[string, ServiceStatus]> -): Map> => { - const byLevel = new Map>(); +type StatusPair = [string, ServiceStatus]; - for (const [serviceName, status] of statuses) { - let levelMap = byLevel.get(status.level); - if (!levelMap) { - levelMap = new Map(); - byLevel.set(status.level, levelMap); - } +const highestLevelSummary = ( + statuses: StatusPair[] +): { highestLevel: ServiceStatusLevel; highestStatuses: StatusPair[] } => { + let highestLevel: ServiceStatusLevel = ServiceStatusLevels.available; + let highestStatuses: StatusPair[] = []; - levelMap.set(serviceName, status); + for (const pair of statuses) { + if (pair[1].level === highestLevel) { + highestStatuses.push(pair); + } else if (pair[1].level > highestLevel) { + highestLevel = pair[1].level; + highestStatuses = [pair]; + } } - return byLevel; -}; - -const getHighestSeverityLevel = (levels: Iterable): ServiceStatusLevel => { - const sorted = [...levels].sort((a, b) => { - if (a < b) { - return -1; - } else if (a > b) { - return 1; - } else { - return 0; - } - }); - return sorted[sorted.length - 1] ?? ServiceStatusLevels.available; + return { + highestLevel, + highestStatuses, + }; }; diff --git a/src/core/server/status/plugins_status.test.ts b/src/core/server/status/plugins_status.test.ts new file mode 100644 index 0000000000000..a75dc8c283698 --- /dev/null +++ b/src/core/server/status/plugins_status.test.ts @@ -0,0 +1,338 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { PluginName } from '../plugins'; +import { PluginsStatusService } from './plugins_status'; +import { of, Observable, BehaviorSubject } from 'rxjs'; +import { ServiceStatusLevels, CoreStatus, ServiceStatus } from './types'; +import { first } from 'rxjs/operators'; +import { ServiceStatusLevelSnapshotSerializer } from './test_utils'; + +expect.addSnapshotSerializer(ServiceStatusLevelSnapshotSerializer); + +describe('PluginStatusService', () => { + const coreAllAvailable$: Observable = of({ + elasticsearch: { level: ServiceStatusLevels.available, summary: 'elasticsearch avail' }, + savedObjects: { level: ServiceStatusLevels.available, summary: 'savedObjects avail' }, + }); + const coreOneDegraded$: Observable = of({ + elasticsearch: { level: ServiceStatusLevels.available, summary: 'elasticsearch avail' }, + savedObjects: { level: ServiceStatusLevels.degraded, summary: 'savedObjects degraded' }, + }); + const coreOneCriticalOneDegraded$: Observable = of({ + elasticsearch: { level: ServiceStatusLevels.critical, summary: 'elasticsearch critical' }, + savedObjects: { level: ServiceStatusLevels.degraded, summary: 'savedObjects degraded' }, + }); + const pluginDependencies: Map = new Map([ + ['a', []], + ['b', ['a']], + ['c', ['a', 'b']], + ]); + + describe('getDerivedStatus$', () => { + it(`defaults to core's most severe status`, async () => { + const serviceAvailable = new PluginsStatusService({ + core$: coreAllAvailable$, + pluginDependencies, + }); + expect(await serviceAvailable.getDerivedStatus$('a').pipe(first()).toPromise()).toEqual({ + level: ServiceStatusLevels.available, + summary: 'All dependencies are available', + }); + + const serviceDegraded = new PluginsStatusService({ + core$: coreOneDegraded$, + pluginDependencies, + }); + expect(await serviceDegraded.getDerivedStatus$('a').pipe(first()).toPromise()).toEqual({ + level: ServiceStatusLevels.degraded, + summary: '[savedObjects]: savedObjects degraded', + detail: 'See the status page for more information', + meta: expect.any(Object), + }); + + const serviceCritical = new PluginsStatusService({ + core$: coreOneCriticalOneDegraded$, + pluginDependencies, + }); + expect(await serviceCritical.getDerivedStatus$('a').pipe(first()).toPromise()).toEqual({ + level: ServiceStatusLevels.critical, + summary: '[elasticsearch]: elasticsearch critical', + detail: 'See the status page for more information', + meta: expect.any(Object), + }); + }); + + it(`provides a summary status when core and dependencies are at same severity level`, async () => { + const service = new PluginsStatusService({ core$: coreOneDegraded$, pluginDependencies }); + service.set('a', of({ level: ServiceStatusLevels.degraded, summary: 'a is degraded' })); + expect(await service.getDerivedStatus$('b').pipe(first()).toPromise()).toEqual({ + level: ServiceStatusLevels.degraded, + summary: '[2] services are degraded', + detail: 'See the status page for more information', + meta: 
expect.any(Object), + }); + }); + + it(`allows dependencies status to take precedence over lower severity core statuses`, async () => { + const service = new PluginsStatusService({ core$: coreOneDegraded$, pluginDependencies }); + service.set('a', of({ level: ServiceStatusLevels.unavailable, summary: 'a is not working' })); + expect(await service.getDerivedStatus$('b').pipe(first()).toPromise()).toEqual({ + level: ServiceStatusLevels.unavailable, + summary: '[a]: a is not working', + detail: 'See the status page for more information', + meta: expect.any(Object), + }); + }); + + it(`allows core status to take precedence over lower severity dependencies statuses`, async () => { + const service = new PluginsStatusService({ + core$: coreOneCriticalOneDegraded$, + pluginDependencies, + }); + service.set('a', of({ level: ServiceStatusLevels.unavailable, summary: 'a is not working' })); + expect(await service.getDerivedStatus$('b').pipe(first()).toPromise()).toEqual({ + level: ServiceStatusLevels.critical, + summary: '[elasticsearch]: elasticsearch critical', + detail: 'See the status page for more information', + meta: expect.any(Object), + }); + }); + + it(`allows a severe dependency status to take precedence over a less severe dependency status`, async () => { + const service = new PluginsStatusService({ core$: coreOneDegraded$, pluginDependencies }); + service.set('a', of({ level: ServiceStatusLevels.degraded, summary: 'a is degraded' })); + service.set('b', of({ level: ServiceStatusLevels.unavailable, summary: 'b is not working' })); + expect(await service.getDerivedStatus$('c').pipe(first()).toPromise()).toEqual({ + level: ServiceStatusLevels.unavailable, + summary: '[b]: b is not working', + detail: 'See the status page for more information', + meta: expect.any(Object), + }); + }); + }); + + describe('getAll$', () => { + it('defaults to empty record if no plugins', async () => { + const service = new PluginsStatusService({ + core$: coreAllAvailable$, + pluginDependencies: new Map(), + }); + expect(await service.getAll$().pipe(first()).toPromise()).toEqual({}); + }); + + it('defaults to core status when no plugin statuses are set', async () => { + const serviceAvailable = new PluginsStatusService({ + core$: coreAllAvailable$, + pluginDependencies, + }); + expect(await serviceAvailable.getAll$().pipe(first()).toPromise()).toEqual({ + a: { level: ServiceStatusLevels.available, summary: 'All dependencies are available' }, + b: { level: ServiceStatusLevels.available, summary: 'All dependencies are available' }, + c: { level: ServiceStatusLevels.available, summary: 'All dependencies are available' }, + }); + + const serviceDegraded = new PluginsStatusService({ + core$: coreOneDegraded$, + pluginDependencies, + }); + expect(await serviceDegraded.getAll$().pipe(first()).toPromise()).toEqual({ + a: { + level: ServiceStatusLevels.degraded, + summary: '[savedObjects]: savedObjects degraded', + detail: 'See the status page for more information', + meta: expect.any(Object), + }, + b: { + level: ServiceStatusLevels.degraded, + summary: '[2] services are degraded', + detail: 'See the status page for more information', + meta: expect.any(Object), + }, + c: { + level: ServiceStatusLevels.degraded, + summary: '[3] services are degraded', + detail: 'See the status page for more information', + meta: expect.any(Object), + }, + }); + + const serviceCritical = new PluginsStatusService({ + core$: coreOneCriticalOneDegraded$, + pluginDependencies, + }); + expect(await 
serviceCritical.getAll$().pipe(first()).toPromise()).toEqual({ + a: { + level: ServiceStatusLevels.critical, + summary: '[elasticsearch]: elasticsearch critical', + detail: 'See the status page for more information', + meta: expect.any(Object), + }, + b: { + level: ServiceStatusLevels.critical, + summary: '[2] services are critical', + detail: 'See the status page for more information', + meta: expect.any(Object), + }, + c: { + level: ServiceStatusLevels.critical, + summary: '[3] services are critical', + detail: 'See the status page for more information', + meta: expect.any(Object), + }, + }); + }); + + it('uses the manually set status level if plugin specifies one', async () => { + const service = new PluginsStatusService({ core$: coreOneDegraded$, pluginDependencies }); + service.set('a', of({ level: ServiceStatusLevels.available, summary: 'a status' })); + + expect(await service.getAll$().pipe(first()).toPromise()).toEqual({ + a: { level: ServiceStatusLevels.available, summary: 'a status' }, // a is available despite savedObjects being degraded + b: { + level: ServiceStatusLevels.degraded, + summary: '[savedObjects]: savedObjects degraded', + detail: 'See the status page for more information', + meta: expect.any(Object), + }, + c: { + level: ServiceStatusLevels.degraded, + summary: '[2] services are degraded', + detail: 'See the status page for more information', + meta: expect.any(Object), + }, + }); + }); + + it('updates when a new plugin status observable is set', async () => { + const service = new PluginsStatusService({ + core$: coreAllAvailable$, + pluginDependencies: new Map([['a', []]]), + }); + const statusUpdates: Array> = []; + const subscription = service + .getAll$() + .subscribe((pluginStatuses) => statusUpdates.push(pluginStatuses)); + + service.set('a', of({ level: ServiceStatusLevels.degraded, summary: 'a degraded' })); + service.set('a', of({ level: ServiceStatusLevels.unavailable, summary: 'a unavailable' })); + service.set('a', of({ level: ServiceStatusLevels.available, summary: 'a available' })); + subscription.unsubscribe(); + + expect(statusUpdates).toEqual([ + { a: { level: ServiceStatusLevels.available, summary: 'All dependencies are available' } }, + { a: { level: ServiceStatusLevels.degraded, summary: 'a degraded' } }, + { a: { level: ServiceStatusLevels.unavailable, summary: 'a unavailable' } }, + { a: { level: ServiceStatusLevels.available, summary: 'a available' } }, + ]); + }); + }); + + describe('getDependenciesStatus$', () => { + it('only includes dependencies of specified plugin', async () => { + const service = new PluginsStatusService({ + core$: coreAllAvailable$, + pluginDependencies, + }); + expect(await service.getDependenciesStatus$('a').pipe(first()).toPromise()).toEqual({}); + expect(await service.getDependenciesStatus$('b').pipe(first()).toPromise()).toEqual({ + a: { level: ServiceStatusLevels.available, summary: 'All dependencies are available' }, + }); + expect(await service.getDependenciesStatus$('c').pipe(first()).toPromise()).toEqual({ + a: { level: ServiceStatusLevels.available, summary: 'All dependencies are available' }, + b: { level: ServiceStatusLevels.available, summary: 'All dependencies are available' }, + }); + }); + + it('uses the manually set status level if plugin specifies one', async () => { + const service = new PluginsStatusService({ core$: coreOneDegraded$, pluginDependencies }); + service.set('a', of({ level: ServiceStatusLevels.available, summary: 'a status' })); + + expect(await
service.getDependenciesStatus$('c').pipe(first()).toPromise()).toEqual({ + a: { level: ServiceStatusLevels.available, summary: 'a status' }, // a is available depsite savedObjects being degraded + b: { + level: ServiceStatusLevels.degraded, + summary: '[savedObjects]: savedObjects degraded', + detail: 'See the status page for more information', + meta: expect.any(Object), + }, + }); + }); + + it('throws error if unknown plugin passed', () => { + const service = new PluginsStatusService({ core$: coreAllAvailable$, pluginDependencies }); + expect(() => { + service.getDependenciesStatus$('dont-exist'); + }).toThrowError(); + }); + + it('debounces events in quick succession', async () => { + const service = new PluginsStatusService({ + core$: coreAllAvailable$, + pluginDependencies, + }); + const available: ServiceStatus = { + level: ServiceStatusLevels.available, + summary: 'a available', + }; + const degraded: ServiceStatus = { + level: ServiceStatusLevels.degraded, + summary: 'a degraded', + }; + const pluginA$ = new BehaviorSubject(available); + service.set('a', pluginA$); + + const statusUpdates: Array> = []; + const subscription = service + .getDependenciesStatus$('b') + .subscribe((status) => statusUpdates.push(status)); + const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); + + pluginA$.next(degraded); + pluginA$.next(available); + pluginA$.next(degraded); + pluginA$.next(available); + pluginA$.next(degraded); + pluginA$.next(available); + pluginA$.next(degraded); + // Waiting for the debounce timeout should cut a new update + await delay(500); + pluginA$.next(available); + await delay(500); + subscription.unsubscribe(); + + expect(statusUpdates).toMatchInlineSnapshot(` + Array [ + Object { + "a": Object { + "level": degraded, + "summary": "a degraded", + }, + }, + Object { + "a": Object { + "level": available, + "summary": "a available", + }, + }, + ] + `); + }); + }); +}); diff --git a/src/core/server/status/plugins_status.ts b/src/core/server/status/plugins_status.ts new file mode 100644 index 0000000000000..113d59b327c11 --- /dev/null +++ b/src/core/server/status/plugins_status.ts @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+import { BehaviorSubject, Observable, combineLatest, of } from 'rxjs';
+import { map, distinctUntilChanged, switchMap, debounceTime } from 'rxjs/operators';
+import { isDeepStrictEqual } from 'util';
+
+import { PluginName } from '../plugins';
+import { ServiceStatus, CoreStatus } from './types';
+import { getSummaryStatus } from './get_summary_status';
+
+interface Deps {
+  core$: Observable<CoreStatus>;
+  pluginDependencies: ReadonlyMap<PluginName, PluginName[]>;
+}
+
+export class PluginsStatusService {
+  private readonly pluginStatuses = new Map<PluginName, Observable<ServiceStatus>>();
+  private readonly update$ = new BehaviorSubject(true);
+  constructor(private readonly deps: Deps) {}
+
+  public set(plugin: PluginName, status$: Observable<ServiceStatus>) {
+    this.pluginStatuses.set(plugin, status$);
+    this.update$.next(true); // trigger all existing Observables to update from the new source Observable
+  }
+
+  public getAll$(): Observable<Record<PluginName, ServiceStatus>> {
+    return this.getPluginStatuses$([...this.deps.pluginDependencies.keys()]);
+  }
+
+  public getDependenciesStatus$(plugin: PluginName): Observable<Record<PluginName, ServiceStatus>> {
+    const dependencies = this.deps.pluginDependencies.get(plugin);
+    if (!dependencies) {
+      throw new Error(`Unknown plugin: ${plugin}`);
+    }
+
+    return this.getPluginStatuses$(dependencies).pipe(
+      // Prevent many emissions at once from dependency status resolution from making this too noisy
+      debounceTime(500)
+    );
+  }
+
+  public getDerivedStatus$(plugin: PluginName): Observable<ServiceStatus> {
+    return combineLatest([this.deps.core$, this.getDependenciesStatus$(plugin)]).pipe(
+      map(([coreStatus, pluginStatuses]) => {
+        return getSummaryStatus(
+          [...Object.entries(coreStatus), ...Object.entries(pluginStatuses)],
+          {
+            allAvailableSummary: `All dependencies are available`,
+          }
+        );
+      })
+    );
+  }
+
+  private getPluginStatuses$(plugins: PluginName[]): Observable<Record<PluginName, ServiceStatus>> {
+    if (plugins.length === 0) {
+      return of({});
+    }
+
+    return this.update$.pipe(
+      switchMap(() => {
+        const pluginStatuses = plugins
+          .map(
+            (depName) =>
+              [depName, this.pluginStatuses.get(depName) ??
this.getDerivedStatus$(depName)] as [ + PluginName, + Observable + ] + ) + .map(([pName, status$]) => + status$.pipe(map((status) => [pName, status] as [PluginName, ServiceStatus])) + ); + + return combineLatest(pluginStatuses).pipe( + map((statuses) => Object.fromEntries(statuses)), + distinctUntilChanged(isDeepStrictEqual) + ); + }) + ); + } +} diff --git a/src/core/server/status/status_service.mock.ts b/src/core/server/status/status_service.mock.ts index 47ef8659b4079..42b3eecdca310 100644 --- a/src/core/server/status/status_service.mock.ts +++ b/src/core/server/status/status_service.mock.ts @@ -40,6 +40,9 @@ const createSetupContractMock = () => { const setupContract: jest.Mocked = { core$: new BehaviorSubject(availableCoreStatus), overall$: new BehaviorSubject(available), + set: jest.fn(), + dependencies$: new BehaviorSubject({}), + derivedStatus$: new BehaviorSubject(available), }; return setupContract; @@ -50,6 +53,11 @@ const createInternalSetupContractMock = () => { core$: new BehaviorSubject(availableCoreStatus), overall$: new BehaviorSubject(available), isStatusPageAnonymous: jest.fn().mockReturnValue(false), + plugins: { + set: jest.fn(), + getDependenciesStatus$: jest.fn(), + getDerivedStatus$: jest.fn(), + }, }; return setupContract; diff --git a/src/core/server/status/status_service.test.ts b/src/core/server/status/status_service.test.ts index 863fe34e8ecea..dcb1e0a559f5d 100644 --- a/src/core/server/status/status_service.test.ts +++ b/src/core/server/status/status_service.test.ts @@ -34,6 +34,7 @@ describe('StatusService', () => { service = new StatusService(mockCoreContext.create()); }); + const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); const available: ServiceStatus = { level: ServiceStatusLevels.available, summary: 'Available', @@ -53,6 +54,7 @@ describe('StatusService', () => { savedObjects: { status$: of(degraded), }, + pluginDependencies: new Map(), }); expect(await setup.core$.pipe(first()).toPromise()).toEqual({ elasticsearch: available, @@ -68,6 +70,7 @@ describe('StatusService', () => { savedObjects: { status$: of(degraded), }, + pluginDependencies: new Map(), }); const subResult1 = await setup.core$.pipe(first()).toPromise(); const subResult2 = await setup.core$.pipe(first()).toPromise(); @@ -96,6 +99,7 @@ describe('StatusService', () => { savedObjects: { status$: savedObjects$, }, + pluginDependencies: new Map(), }); const statusUpdates: CoreStatus[] = []; @@ -158,6 +162,7 @@ describe('StatusService', () => { savedObjects: { status$: of(degraded), }, + pluginDependencies: new Map(), }); expect(await setup.overall$.pipe(first()).toPromise()).toMatchObject({ level: ServiceStatusLevels.degraded, @@ -173,6 +178,7 @@ describe('StatusService', () => { savedObjects: { status$: of(degraded), }, + pluginDependencies: new Map(), }); const subResult1 = await setup.overall$.pipe(first()).toPromise(); const subResult2 = await setup.overall$.pipe(first()).toPromise(); @@ -201,26 +207,95 @@ describe('StatusService', () => { savedObjects: { status$: savedObjects$, }, + pluginDependencies: new Map(), }); const statusUpdates: ServiceStatus[] = []; const subscription = setup.overall$.subscribe((status) => statusUpdates.push(status)); + // Wait for timers to ensure that duplicate events are still filtered out regardless of debouncing. 
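The behaviour this test relies on comes from the `overall$` pipeline combining `debounceTime(500)` with `distinctUntilChanged(isDeepStrictEqual)`. A minimal standalone RxJS sketch of that interaction (illustrative status values only, not part of this patch):

```ts
import { Subject } from 'rxjs';
import { debounceTime, distinctUntilChanged } from 'rxjs/operators';
import { isDeepStrictEqual } from 'util';

interface FakeStatus {
  level: string;
  summary: string;
}

const source$ = new Subject<FakeStatus>();
const emitted: FakeStatus[] = [];

source$
  .pipe(debounceTime(500), distinctUntilChanged(isDeepStrictEqual))
  .subscribe((status) => emitted.push(status));

// A burst of updates inside the 500ms window collapses to the last value,
// and a value deep-equal to the previous emission is dropped entirely.
source$.next({ level: 'degraded', summary: 'slow' });
source$.next({ level: 'available', summary: 'ok' });
// ...after >500ms of silence, `emitted` holds only { level: 'available', summary: 'ok' }
```

The assertions below drive the real pipeline the same way, spacing emissions out with `delay(500)` so deduplication can be observed independently of debouncing.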
elasticsearch$.next(available); + await delay(500); elasticsearch$.next(available); + await delay(500); elasticsearch$.next({ level: ServiceStatusLevels.available, summary: `Wow another summary`, }); + await delay(500); savedObjects$.next(degraded); + await delay(500); savedObjects$.next(available); + await delay(500); savedObjects$.next(available); + await delay(500); subscription.unsubscribe(); expect(statusUpdates).toMatchInlineSnapshot(` Array [ Object { + "detail": "See the status page for more information", "level": degraded, + "meta": Object { + "affectedServices": Object { + "savedObjects": Object { + "level": degraded, + "summary": "This is degraded!", + }, + }, + }, + "summary": "[savedObjects]: This is degraded!", + }, + Object { + "level": available, + "summary": "All services are available", + }, + ] + `); + }); + + it('debounces events in quick succession', async () => { + const savedObjects$ = new BehaviorSubject(available); + const setup = await service.setup({ + elasticsearch: { + status$: new BehaviorSubject(available), + }, + savedObjects: { + status$: savedObjects$, + }, + pluginDependencies: new Map(), + }); + + const statusUpdates: ServiceStatus[] = []; + const subscription = setup.overall$.subscribe((status) => statusUpdates.push(status)); + + // All of these should debounced into a single `available` status + savedObjects$.next(degraded); + savedObjects$.next(available); + savedObjects$.next(degraded); + savedObjects$.next(available); + savedObjects$.next(degraded); + savedObjects$.next(available); + savedObjects$.next(degraded); + // Waiting for the debounce timeout should cut a new update + await delay(500); + savedObjects$.next(available); + await delay(500); + subscription.unsubscribe(); + + expect(statusUpdates).toMatchInlineSnapshot(` + Array [ + Object { + "detail": "See the status page for more information", + "level": degraded, + "meta": Object { + "affectedServices": Object { + "savedObjects": Object { + "level": degraded, + "summary": "This is degraded!", + }, + }, + }, "summary": "[savedObjects]: This is degraded!", }, Object { diff --git a/src/core/server/status/status_service.ts b/src/core/server/status/status_service.ts index aea335e64babf..8fe65eddb61d3 100644 --- a/src/core/server/status/status_service.ts +++ b/src/core/server/status/status_service.ts @@ -18,7 +18,7 @@ */ import { Observable, combineLatest } from 'rxjs'; -import { map, distinctUntilChanged, shareReplay, take } from 'rxjs/operators'; +import { map, distinctUntilChanged, shareReplay, take, debounceTime } from 'rxjs/operators'; import { isDeepStrictEqual } from 'util'; import { CoreService } from '../../types'; @@ -26,13 +26,16 @@ import { CoreContext } from '../core_context'; import { Logger } from '../logging'; import { InternalElasticsearchServiceSetup } from '../elasticsearch'; import { InternalSavedObjectsServiceSetup } from '../saved_objects'; +import { PluginName } from '../plugins'; import { config, StatusConfigType } from './status_config'; import { ServiceStatus, CoreStatus, InternalStatusServiceSetup } from './types'; import { getSummaryStatus } from './get_summary_status'; +import { PluginsStatusService } from './plugins_status'; interface SetupDeps { elasticsearch: Pick; + pluginDependencies: ReadonlyMap; savedObjects: Pick; } @@ -40,26 +43,44 @@ export class StatusService implements CoreService { private readonly logger: Logger; private readonly config$: Observable; + private pluginsStatus?: PluginsStatusService; + constructor(coreContext: CoreContext) { this.logger = 
coreContext.logger.get('status');
     this.config$ = coreContext.configService.atPath(config.path);
   }
 
-  public async setup(core: SetupDeps) {
+  public async setup({ elasticsearch, pluginDependencies, savedObjects }: SetupDeps) {
     const statusConfig = await this.config$.pipe(take(1)).toPromise();
-    const core$ = this.setupCoreStatus(core);
-    const overall$: Observable<ServiceStatus> = core$.pipe(
-      map((coreStatus) => {
-        const summary = getSummaryStatus(Object.entries(coreStatus));
+    const core$ = this.setupCoreStatus({ elasticsearch, savedObjects });
+    this.pluginsStatus = new PluginsStatusService({ core$, pluginDependencies });
+
+    const overall$: Observable<ServiceStatus> = combineLatest(
+      core$,
+      this.pluginsStatus.getAll$()
+    ).pipe(
+      // Prevent many emissions at once from dependency status resolution from making this too noisy
+      debounceTime(500),
+      map(([coreStatus, pluginsStatus]) => {
+        const summary = getSummaryStatus([
+          ...Object.entries(coreStatus),
+          ...Object.entries(pluginsStatus),
+        ]);
         this.logger.debug(`Recalculated overall status`, { status: summary });
         return summary;
       }),
-      distinctUntilChanged(isDeepStrictEqual)
+      distinctUntilChanged(isDeepStrictEqual),
+      shareReplay(1)
     );
 
     return {
       core$,
       overall$,
+      plugins: {
+        set: this.pluginsStatus.set.bind(this.pluginsStatus),
+        getDependenciesStatus$: this.pluginsStatus.getDependenciesStatus$.bind(this.pluginsStatus),
+        getDerivedStatus$: this.pluginsStatus.getDerivedStatus$.bind(this.pluginsStatus),
+      },
       isStatusPageAnonymous: () => statusConfig.allowAnonymous,
     };
   }
@@ -68,7 +89,10 @@ export class StatusService implements CoreService<InternalStatusServiceSetup> {
 
   public stop() {}
 
-  private setupCoreStatus({ elasticsearch, savedObjects }: SetupDeps): Observable<CoreStatus> {
+  private setupCoreStatus({
+    elasticsearch,
+    savedObjects,
+  }: Pick<SetupDeps, 'elasticsearch' | 'savedObjects'>): Observable<CoreStatus> {
     return combineLatest([elasticsearch.status$, savedObjects.status$]).pipe(
       map(([elasticsearchStatus, savedObjectsStatus]) => ({
         elasticsearch: elasticsearchStatus,
diff --git a/src/core/server/status/types.ts b/src/core/server/status/types.ts
index 2ecf11deb2960..f884b80316fa8 100644
--- a/src/core/server/status/types.ts
+++ b/src/core/server/status/types.ts
@@ -19,6 +19,7 @@
 import { Observable } from 'rxjs';
 
 import { deepFreeze } from '../../utils';
+import { PluginName } from '../plugins';
 
 /**
  * The current status of a service at a point in time.
@@ -116,6 +117,60 @@ export interface CoreStatus {
 
 /**
  * API for accessing status of Core and this plugin's dependencies as well as for customizing this plugin's status.
+ *
+ * @remarks
+ * By default, a plugin inherits its current status from the most severe status level of any Core services and any
+ * plugins that it depends on. This default status is available on the
+ * {@link StatusServiceSetup.derivedStatus$ | core.status.derivedStatus$} API.
+ *
+ * Plugins may customize their status calculation by calling the {@link StatusServiceSetup.set | core.status.set} API
+ * with an Observable. Within this Observable, a plugin may choose to only depend on the status of some of its
+ * dependencies, to ignore severe status levels of particular Core services they are not concerned with, or to make its
+ * status dependent on other external services.
+ *
+ * @example
+ * Customize a plugin's status to only depend on the status of SavedObjects:
+ * ```ts
+ * core.status.set(
+ *   core.status.core$.pipe(
+ *     map((coreStatus) => {
+ *       return coreStatus.savedObjects;
+ *     })
+ *   )
+ * );
+ * ```
+ *
+ * @example
+ * Customize a plugin's status to include an external service:
+ * ```ts
+ * const externalStatus$ = interval(1000).pipe(
+ *   switchMap(async () => {
+ *     const resp = await fetch(`https://myexternaldep.com/_healthz`);
+ *     const body = await resp.json();
+ *     if (body.ok) {
+ *       return { level: ServiceStatusLevels.available, summary: 'External Service is up' };
+ *     } else {
+ *       return { level: ServiceStatusLevels.unavailable, summary: 'External Service is unavailable' };
+ *     }
+ *   }),
+ *   catchError((error) =>
+ *     of({ level: ServiceStatusLevels.unavailable, summary: `External Service is down`, meta: { error } })
+ *   )
+ * );
+ *
+ * core.status.set(
+ *   combineLatest([core.status.derivedStatus$, externalStatus$]).pipe(
+ *     map(([derivedStatus, externalStatus]) => {
+ *       if (externalStatus.level > derivedStatus.level) {
+ *         return externalStatus;
+ *       } else {
+ *         return derivedStatus;
+ *       }
+ *     })
+ *   )
+ * );
+ * ```
+ *
  * @public
  */
 export interface StatusServiceSetup {
@@ -134,9 +189,43 @@ export interface StatusServiceSetup {
    * only depend on the statuses of {@link StatusServiceSetup.core$ | Core} or their dependencies.
    */
   overall$: Observable<ServiceStatus>;
+
+  /**
+   * Allows a plugin to specify a custom status dependent on its own criteria.
+   * Completely overrides the default inherited status.
+   *
+   * @remarks
+   * See the {@link StatusServiceSetup.derivedStatus$} API for leveraging the default status
+   * calculation that is provided by Core.
+   */
+  set(status$: Observable<ServiceStatus>): void;
+
+  /**
+   * Current status for all plugins this plugin depends on.
+   * Each key of the `Record` is a plugin id.
+   */
+  dependencies$: Observable<Record<string, ServiceStatus>>;
+
+  /**
+   * The status of this plugin as derived from its dependencies.
+   *
+   * @remarks
+   * By default, plugins inherit this derived status from their dependencies.
+   * Calling {@link StatusServiceSetup.set} overrides this default status.
+   *
+   * This may emit multiple times for a single status change event as it propagates
+   * through the dependency tree.
+   */
+  derivedStatus$: Observable<ServiceStatus>;
 }
 
 /** @internal */
-export interface InternalStatusServiceSetup extends StatusServiceSetup {
+export interface InternalStatusServiceSetup extends Pick<StatusServiceSetup, 'core$' | 'overall$'> {
   isStatusPageAnonymous: () => boolean;
+  // Namespaced under `plugins` key to improve clarity that these are APIs for plugins specifically.
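The `plugins` namespace declared just below is keyed by plugin name. It is not shown in this diff, but presumably the per-plugin `StatusServiceSetup` contract documented above is derived from it roughly along these lines (a sketch; `internalStatus` and `pluginName` are illustrative placeholders):

```ts
declare const internalStatus: InternalStatusServiceSetup;
declare const pluginName: PluginName;

// Each plugin receives a contract bound to its own name, so it never needs to
// know how Core keys statuses internally.
const statusForPlugin: StatusServiceSetup = {
  core$: internalStatus.core$,
  overall$: internalStatus.overall$,
  set: (status$) => internalStatus.plugins.set(pluginName, status$),
  dependencies$: internalStatus.plugins.getDependenciesStatus$(pluginName),
  derivedStatus$: internalStatus.plugins.getDerivedStatus$(pluginName),
};
```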
+ plugins: { + set(plugin: PluginName, status$: Observable): void; + getDependenciesStatus$(plugin: PluginName): Observable>; + getDerivedStatus$(plugin: PluginName): Observable; + }; } diff --git a/src/core/server/ui_settings/create_or_upgrade_saved_config/integration_tests/create_or_upgrade.test.ts b/src/core/server/ui_settings/create_or_upgrade_saved_config/integration_tests/create_or_upgrade.test.ts index d2e31dad58e55..c7d5413ecca56 100644 --- a/src/core/server/ui_settings/create_or_upgrade_saved_config/integration_tests/create_or_upgrade.test.ts +++ b/src/core/server/ui_settings/create_or_upgrade_saved_config/integration_tests/create_or_upgrade.test.ts @@ -24,7 +24,7 @@ import { TestElasticsearchUtils, TestKibanaUtils, TestUtils, -} from '../../../../../test_utils/kbn_server'; +} from '../../../../test_helpers/kbn_server'; import { createOrUpgradeSavedConfig } from '../create_or_upgrade_saved_config'; import { loggingSystemMock } from '../../../logging/logging_system.mock'; import { httpServerMock } from '../../../http/http_server.mocks'; @@ -36,8 +36,6 @@ describe('createOrUpgradeSavedConfig()', () => { let esServer: TestElasticsearchUtils; let kbn: TestKibanaUtils; - let kbnServer: TestKibanaUtils['kbnServer']; - beforeAll(async function () { servers = createTestServers({ adjustTimeout: (t) => { @@ -46,10 +44,8 @@ describe('createOrUpgradeSavedConfig()', () => { }); esServer = await servers.startES(); kbn = await servers.startKibana(); - kbnServer = kbn.kbnServer; - const savedObjects = kbnServer.server.savedObjects; - savedObjectsClient = savedObjects.getScopedSavedObjectsClient( + savedObjectsClient = kbn.coreStart.savedObjects.getScopedClient( httpServerMock.createKibanaRequest() ); diff --git a/src/core/server/ui_settings/integration_tests/lib/servers.ts b/src/core/server/ui_settings/integration_tests/lib/servers.ts index 04979b69b32b9..0bdc821f42581 100644 --- a/src/core/server/ui_settings/integration_tests/lib/servers.ts +++ b/src/core/server/ui_settings/integration_tests/lib/servers.ts @@ -24,7 +24,7 @@ import { TestElasticsearchUtils, TestKibanaUtils, TestUtils, -} from '../../../../../test_utils/kbn_server'; +} from '../../../../test_helpers/kbn_server'; import { LegacyAPICaller } from '../../../elasticsearch/'; import { httpServerMock } from '../../../http/http_server.mocks'; @@ -68,8 +68,7 @@ export function getServices() { const callCluster = esServer.es.getCallCluster(); - const savedObjects = kbnServer.server.savedObjects; - const savedObjectsClient = savedObjects.getScopedSavedObjectsClient( + const savedObjectsClient = kbn.coreStart.savedObjects.getScopedClient( httpServerMock.createKibanaRequest() ); diff --git a/src/core/server/ui_settings/integration_tests/routes.test.ts b/src/core/server/ui_settings/integration_tests/routes.test.ts index b18cc370fac3c..063d68e3866b7 100644 --- a/src/core/server/ui_settings/integration_tests/routes.test.ts +++ b/src/core/server/ui_settings/integration_tests/routes.test.ts @@ -18,7 +18,7 @@ */ import { schema } from '@kbn/config-schema'; -import * as kbnTestServer from '../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../test_helpers/kbn_server'; describe('ui settings service', () => { describe('routes', () => { diff --git a/src/core/server/utils/index.ts b/src/core/server/utils/index.ts index b01a4c4e04899..d9c4217c4117f 100644 --- a/src/core/server/utils/index.ts +++ b/src/core/server/utils/index.ts @@ -20,3 +20,4 @@ export * from './crypto'; export * from './from_root'; export * from './package_json'; 
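The `./streams` re-export added just below pulls in the stream helpers introduced by the files that follow. As a rough sketch of how they compose (using only the helpers defined later in this patch):

```ts
import {
  createListStream,
  createMapStream,
  createConcatStream,
  createPromiseFromStreams,
} from './streams';

async function example() {
  // List -> map -> concat, driven to completion by createPromiseFromStreams.
  const result = await createPromiseFromStreams([
    createListStream([1, 2, 3]),
    createMapStream((n: number) => n * 10),
    createConcatStream([]),
  ]);
  return result; // [10, 20, 30]
}
```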
+export * from './streams'; diff --git a/src/core/server/utils/streams/concat_stream.test.ts b/src/core/server/utils/streams/concat_stream.test.ts new file mode 100644 index 0000000000000..e964ab2a7a97e --- /dev/null +++ b/src/core/server/utils/streams/concat_stream.test.ts @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { createListStream, createPromiseFromStreams, createConcatStream } from './index'; + +describe('concatStream', () => { + test('accepts an initial value', async () => { + const output = await createPromiseFromStreams([ + createListStream([1, 2, 3]), + createConcatStream([0]), + ]); + + expect(output).toEqual([0, 1, 2, 3]); + }); + + describe(`combines using the previous value's concat method`, () => { + test('works with strings', async () => { + const output = await createPromiseFromStreams([ + createListStream(['a', 'b', 'c']), + createConcatStream(), + ]); + expect(output).toEqual('abc'); + }); + + test('works with arrays', async () => { + const output = await createPromiseFromStreams([ + createListStream([[1], [2, 3, 4], [10]]), + createConcatStream(), + ]); + expect(output).toEqual([1, 2, 3, 4, 10]); + }); + + test('works with a mixture, starting with array', async () => { + const output = await createPromiseFromStreams([ + createListStream([[], 1, 2, 3, 4, [5, 6, 7]]), + createConcatStream(), + ]); + expect(output).toEqual([1, 2, 3, 4, 5, 6, 7]); + }); + + test('fails when the value does not have a concat method', async () => { + let promise; + try { + promise = createPromiseFromStreams([createListStream([1, '1']), createConcatStream()]); + } catch (err) { + throw new Error('createPromiseFromStreams() should not fail synchronously'); + } + + try { + await promise; + throw new Error('Promise should have rejected'); + } catch (err) { + expect(err).toBeInstanceOf(Error); + expect(err.message).toContain('concat'); + } + }); + }); +}); diff --git a/src/core/server/utils/streams/concat_stream.ts b/src/core/server/utils/streams/concat_stream.ts new file mode 100644 index 0000000000000..03450cb51b832 --- /dev/null +++ b/src/core/server/utils/streams/concat_stream.ts @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { createReduceStream } from './reduce_stream'; + +/** + * Creates a Transform stream that consumes all provided + * values and concatenates them using each values `concat` + * method. + * + * Concatenate strings: + * createListStream(['f', 'o', 'o']) + * .pipe(createConcatStream()) + * .on('data', console.log) + * // logs "foo" + * + * Concatenate values into an array: + * createListStream([1,2,3]) + * .pipe(createConcatStream([])) + * .on('data', console.log) + * // logs "[1,2,3]" + * + * + * @param {any} initial The initial value that subsequent + * items will concat with + * @return {Transform} + */ +export function createConcatStream(initial?: T) { + return createReduceStream((acc, chunk) => acc.concat(chunk), initial); +} diff --git a/src/core/server/utils/streams/concat_stream_providers.test.ts b/src/core/server/utils/streams/concat_stream_providers.test.ts new file mode 100644 index 0000000000000..b742a770b70c8 --- /dev/null +++ b/src/core/server/utils/streams/concat_stream_providers.test.ts @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { Readable } from 'stream'; + +import { concatStreamProviders } from './concat_stream_providers'; +import { createListStream } from './list_stream'; +import { createConcatStream } from './concat_stream'; +import { createPromiseFromStreams } from './promise_from_streams'; + +describe('concatStreamProviders() helper', () => { + test('writes the data from an array of stream providers into a destination stream in order', async () => { + const results = await createPromiseFromStreams([ + concatStreamProviders([ + () => createListStream(['foo', 'bar']), + () => createListStream(['baz']), + () => createListStream(['bug']), + ]), + createConcatStream(''), + ]); + + expect(results).toBe('foobarbazbug'); + }); + + test('emits the errors from a sub-stream to the destination', async () => { + const dest = concatStreamProviders([ + () => createListStream(['foo', 'bar']), + () => + new Readable({ + read() { + this.emit('error', new Error('foo')); + }, + }), + ]); + + const errorListener = jest.fn(); + dest.on('error', errorListener); + + await expect(createPromiseFromStreams([dest])).rejects.toThrowErrorMatchingInlineSnapshot( + `"foo"` + ); + expect(errorListener.mock.calls).toMatchInlineSnapshot(` +Array [ + Array [ + [Error: foo], + ], +] +`); + }); +}); diff --git a/src/core/server/utils/streams/concat_stream_providers.ts b/src/core/server/utils/streams/concat_stream_providers.ts new file mode 100644 index 0000000000000..bb836e3d73787 --- /dev/null +++ b/src/core/server/utils/streams/concat_stream_providers.ts @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Readable, PassThrough, TransformOptions } from 'stream'; + +/** + * Write the data and errors from a list of stream providers + * to a single stream in order. Stream providers are only + * called right before they will be consumed, and only one + * provider will be active at a time. 
+ * + * @param {Array<() => ReadableStream>} sourceProviders + * @param {PassThroughOptions} options options passed to the PassThrough constructor + * @return {WritableStream} combined stream + */ +export function concatStreamProviders( + sourceProviders: Array<() => Readable>, + options?: TransformOptions +) { + const destination = new PassThrough(options); + const queue = sourceProviders.slice(); + + (function pipeNext() { + const provider = queue.shift(); + + if (!provider) { + return; + } + + const source = provider(); + const isLast = !queue.length; + + // if there are more sources to pipe, hook + // into the source completion + if (!isLast) { + source.once('end', pipeNext); + } + + source + // proxy errors from the source to the destination + .once('error', (error) => destination.emit('error', error)) + // pipe the source to the destination but only proxy the + // end event if this is the last source + .pipe(destination, { end: isLast }); + })(); + + return destination; +} diff --git a/src/core/server/utils/streams/filter_stream.test.ts b/src/core/server/utils/streams/filter_stream.test.ts new file mode 100644 index 0000000000000..41073e54b0a84 --- /dev/null +++ b/src/core/server/utils/streams/filter_stream.test.ts @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { + createConcatStream, + createFilterStream, + createListStream, + createPromiseFromStreams, +} from './index'; + +describe('createFilterStream()', () => { + test('calls the function with each item in the source stream', async () => { + const filter = jest.fn().mockReturnValue(true); + + await createPromiseFromStreams([createListStream(['a', 'b', 'c']), createFilterStream(filter)]); + + expect(filter).toMatchInlineSnapshot(` + [MockFunction] { + "calls": Array [ + Array [ + "a", + ], + Array [ + "b", + ], + Array [ + "c", + ], + ], + "results": Array [ + Object { + "type": "return", + "value": true, + }, + Object { + "type": "return", + "value": true, + }, + Object { + "type": "return", + "value": true, + }, + ], + } + `); + }); + + test('send the filtered values on the output stream', async () => { + const result = await createPromiseFromStreams([ + createListStream([1, 2, 3]), + createFilterStream((n) => n % 2 === 0), + createConcatStream([]), + ]); + + expect(result).toMatchInlineSnapshot(` + Array [ + 2, + ] + `); + }); +}); diff --git a/src/legacy/utils/streams/filter_stream.ts b/src/core/server/utils/streams/filter_stream.ts similarity index 100% rename from src/legacy/utils/streams/filter_stream.ts rename to src/core/server/utils/streams/filter_stream.ts diff --git a/src/core/server/utils/streams/index.ts b/src/core/server/utils/streams/index.ts new file mode 100644 index 0000000000000..447d1ed5b1c53 --- /dev/null +++ b/src/core/server/utils/streams/index.ts @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export { concatStreamProviders } from './concat_stream_providers'; +export { createIntersperseStream } from './intersperse_stream'; +export { createSplitStream } from './split_stream'; +export { createListStream } from './list_stream'; +export { createReduceStream } from './reduce_stream'; +export { createPromiseFromStreams } from './promise_from_streams'; +export { createConcatStream } from './concat_stream'; +export { createMapStream } from './map_stream'; +export { createReplaceStream } from './replace_stream'; +export { createFilterStream } from './filter_stream'; diff --git a/src/core/server/utils/streams/intersperse_stream.test.ts b/src/core/server/utils/streams/intersperse_stream.test.ts new file mode 100644 index 0000000000000..9aa15035d2a1c --- /dev/null +++ b/src/core/server/utils/streams/intersperse_stream.test.ts @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { + createPromiseFromStreams, + createListStream, + createIntersperseStream, + createConcatStream, +} from './index'; + +describe('intersperseStream', () => { + test('places the intersperse value between each provided value', async () => { + expect( + await createPromiseFromStreams([ + createListStream(['to', 'be', 'or', 'not', 'to', 'be']), + createIntersperseStream(' '), + createConcatStream(), + ]) + ).toBe('to be or not to be'); + }); + + test('emits values as soon as possible, does not needlessly buffer', async () => { + const str = createIntersperseStream('y'); + const onData = jest.fn(); + str.on('data', onData); + + str.write('a'); + expect(onData).toHaveBeenCalledTimes(1); + expect(onData.mock.calls[0]).toEqual(['a']); + onData.mockClear(); + + str.write('b'); + expect(onData).toHaveBeenCalledTimes(2); + expect(onData.mock.calls[0]).toEqual(['y']); + expect(onData).toHaveBeenCalledTimes(2); + expect(onData.mock.calls[1]).toEqual(['b']); + }); +}); diff --git a/src/core/server/utils/streams/intersperse_stream.ts b/src/core/server/utils/streams/intersperse_stream.ts new file mode 100644 index 0000000000000..272507221caff --- /dev/null +++ b/src/core/server/utils/streams/intersperse_stream.ts @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +/** + * Create a Transform stream that receives values in object mode, + * and intersperses a chunk between each object received. 
+ * + * This is useful for writing lists: + * + * createListStream(['foo', 'bar']) + * .pipe(createIntersperseStream('\n')) + * .pipe(process.stdout) // outputs "foo\nbar" + * + * Combine with a concat stream to get "join" like functionality: + * + * await createPromiseFromStreams([ + * createListStream(['foo', 'bar']), + * createIntersperseStream(' '), + * createConcatStream() + * ]) // produces a single value "foo bar" + * + * @param {String|Buffer} intersperseChunk + * @return {Transform} + */ +export function createIntersperseStream(intersperseChunk: string | Buffer) { + let first = true; + + return new Transform({ + writableObjectMode: true, + readableObjectMode: true, + transform(chunk, enc, callback) { + try { + if (first) { + first = false; + } else { + this.push(intersperseChunk); + } + + this.push(chunk); + callback(); + } catch (err) { + callback(err); + } + }, + }); +} diff --git a/src/core/server/utils/streams/list_stream.test.ts b/src/core/server/utils/streams/list_stream.test.ts new file mode 100644 index 0000000000000..2a20c929db6b9 --- /dev/null +++ b/src/core/server/utils/streams/list_stream.test.ts @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { createListStream } from './index'; + +describe('listStream', () => { + test('provides the values in the initial list', async () => { + const str = createListStream([1, 2, 3, 4]); + const onData = jest.fn(); + str.on('data', onData); + + await new Promise((resolve) => str.on('end', resolve)); + + expect(onData).toHaveBeenCalledTimes(4); + expect(onData.mock.calls[0]).toEqual([1]); + expect(onData.mock.calls[1]).toEqual([2]); + expect(onData.mock.calls[2]).toEqual([3]); + expect(onData.mock.calls[3]).toEqual([4]); + }); + + test('does not modify the list passed', async () => { + const list = [1, 2, 3, 4]; + const str = createListStream(list); + str.resume(); + await new Promise((resolve) => str.on('end', resolve)); + expect(list).toEqual([1, 2, 3, 4]); + }); +}); diff --git a/src/core/server/utils/streams/list_stream.ts b/src/core/server/utils/streams/list_stream.ts new file mode 100644 index 0000000000000..e62f6d3fa930b --- /dev/null +++ b/src/core/server/utils/streams/list_stream.ts @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Readable } from 'stream'; + +/** + * Create a Readable stream that provides the items + * from a list as objects to subscribers + * + * @param {Array} items - the list of items to provide + * @return {Readable} + */ +export function createListStream(items: T | T[] = []) { + const queue = Array.isArray(items) ? [...items] : [items]; + + return new Readable({ + objectMode: true, + read(size) { + queue.splice(0, size).forEach((item) => { + this.push(item); + }); + + if (!queue.length) { + this.push(null); + } + }, + }); +} diff --git a/src/core/server/utils/streams/map_stream.test.ts b/src/core/server/utils/streams/map_stream.test.ts new file mode 100644 index 0000000000000..bf0cab39c21f4 --- /dev/null +++ b/src/core/server/utils/streams/map_stream.test.ts @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { delay } from 'bluebird'; + +import { createPromiseFromStreams } from './promise_from_streams'; +import { createListStream } from './list_stream'; +import { createMapStream } from './map_stream'; +import { createConcatStream } from './concat_stream'; + +describe('createMapStream()', () => { + test('calls the function with each item in the source stream', async () => { + const mapper = jest.fn(); + + await createPromiseFromStreams([createListStream(['a', 'b', 'c']), createMapStream(mapper)]); + + expect(mapper).toHaveBeenCalledTimes(3); + expect(mapper).toHaveBeenCalledWith('a', 0); + expect(mapper).toHaveBeenCalledWith('b', 1); + expect(mapper).toHaveBeenCalledWith('c', 2); + }); + + test('send the return value from the mapper on the output stream', async () => { + const result = await createPromiseFromStreams([ + createListStream([1, 2, 3]), + createMapStream((n: number) => n * 100), + createConcatStream([]), + ]); + + expect(result).toEqual([100, 200, 300]); + }); + + test('supports async mappers', async () => { + const result = await createPromiseFromStreams([ + createListStream([1, 2, 3]), + createMapStream(async (n: number, i: number) => { + await delay(n); + return n * i; + }), + createConcatStream([]), + ]); + + expect(result).toEqual([0, 2, 6]); + }); +}); diff --git a/src/core/server/utils/streams/map_stream.ts b/src/core/server/utils/streams/map_stream.ts new file mode 100644 index 0000000000000..aad53cc526626 --- /dev/null +++ b/src/core/server/utils/streams/map_stream.ts @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +export function createMapStream(fn: (value: T, i: number) => void) { + let i = 0; + + return new Transform({ + objectMode: true, + async transform(value, enc, done) { + try { + this.push(await fn(value, i++)); + done(); + } catch (err) { + done(err); + } + }, + }); +} diff --git a/src/core/server/utils/streams/promise_from_streams.test.ts b/src/core/server/utils/streams/promise_from_streams.test.ts new file mode 100644 index 0000000000000..1f2596c16a6fa --- /dev/null +++ b/src/core/server/utils/streams/promise_from_streams.test.ts @@ -0,0 +1,135 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Readable, Writable, Duplex, Transform } from 'stream'; + +import { createListStream, createPromiseFromStreams, createReduceStream } from './index'; + +describe('promiseFromStreams', () => { + test('pipes together an array of streams', async () => { + const str1 = createListStream([1, 2, 3]); + const str2 = createReduceStream((acc, n) => acc + n, 0); + const sumPromise = new Promise((resolve) => str2.once('data', resolve)); + createPromiseFromStreams([str1, str2]); + await new Promise((resolve) => str2.once('end', resolve)); + expect(await sumPromise).toBe(6); + }); + + describe('last stream is writable', () => { + test('waits for the last stream to finish writing', async () => { + let written = ''; + + await createPromiseFromStreams([ + createListStream(['a']), + new Writable({ + write(chunk, enc, cb) { + setTimeout(() => { + written += chunk; + cb(); + }, 100); + }, + }), + ]); + + expect(written).toBe('a'); + }); + + test('resolves to undefined', async () => { + const result = await createPromiseFromStreams([ + createListStream(['a']), + new Writable({ + write(chunk, enc, cb) { + cb(); + }, + }), + ]); + + expect(result).toBe(undefined); + }); + }); + + describe('last stream is readable', () => { + test(`resolves to it's final value`, async () => { + const result = await createPromiseFromStreams([createListStream(['a', 'b', 'c'])]); + + expect(result).toBe('c'); + }); + }); + + describe('last stream is duplex', () => { + test('waits for writing and resolves to final value', async () => { + let written = ''; + + const duplexReadQueue: Array> = []; + const duplexItemsToPush = ['foo', 'bar', null]; + const result = await createPromiseFromStreams([ + createListStream(['a', 'b', 'c']), + new Duplex({ + async read() { + this.push(await duplexReadQueue.shift()); + }, + + write(chunk, enc, cb) { + duplexReadQueue.push( + new Promise((resolve) => { + setTimeout(() => { + written += chunk; + cb(); + resolve(duplexItemsToPush.shift()); + }, 50); + }) + ); + }, + }).setEncoding('utf8'), + ]); + + expect(written).toEqual('abc'); + expect(result).toBe('bar'); + }); + }); + + describe('error handling', () => { + test('read stream gets destroyed when transform stream fails', async () => { + let destroyCalled = false; + const readStream = new Readable({ + read() { + this.push('a'); + this.push('b'); + this.push('c'); + this.push(null); + }, + destroy() { + destroyCalled = true; + }, + }); + const transformStream = new Transform({ + transform(chunk, enc, done) { + done(new Error('Test error')); + }, + }); + try { + await createPromiseFromStreams([readStream, transformStream]); + throw new Error('Should fail'); + } catch (e) { + expect(e.message).toBe('Test error'); + expect(destroyCalled).toBe(true); + } + }); + }); +}); diff --git a/src/core/server/utils/streams/promise_from_streams.ts b/src/core/server/utils/streams/promise_from_streams.ts new file mode 100644 index 0000000000000..f5fc4af62bc83 --- /dev/null +++ b/src/core/server/utils/streams/promise_from_streams.ts @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch B.V. 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Take an array of streams, pipe the output + * from each one into the next, listening for + * errors from any of the streams, and then resolve + * the promise once the final stream has finished + * writing/reading. + * + * If the last stream is readable, it's final value + * will be provided as the promise value. + * + * Errors emitted from any stream will cause + * the promise to be rejected with that error. + * + * @param {Array} streams + * @return {Promise} + */ + +import { pipeline, Writable, Readable } from 'stream'; + +function isReadable(stream: Readable | Writable): stream is Readable { + return 'read' in stream && typeof stream.read === 'function'; +} + +export async function createPromiseFromStreams(streams: [Readable, ...Writable[]]): Promise { + let finalChunk: any; + const last = streams[streams.length - 1]; + if (!isReadable(last) && streams.length === 1) { + // For a nicer error than what stream.pipeline throws + throw new Error('A minimum of 2 streams is required when a non-readable stream is given'); + } + if (isReadable(last)) { + // We are pushing a writable stream to capture the last chunk + streams.push( + new Writable({ + // Use object mode even when "last" stream isn't. This allows to + // capture the last chunk as-is. + objectMode: true, + write(chunk, enc, done) { + finalChunk = chunk; + done(); + }, + }) + ); + } + + return new Promise((resolve, reject) => { + // @ts-expect-error 'pipeline' doesn't support variable length of arguments + pipeline(...streams, (err) => { + if (err) return reject(err); + resolve(finalChunk); + }); + }); +} diff --git a/src/core/server/utils/streams/reduce_stream.test.ts b/src/core/server/utils/streams/reduce_stream.test.ts new file mode 100644 index 0000000000000..e4a7dc1cef491 --- /dev/null +++ b/src/core/server/utils/streams/reduce_stream.test.ts @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { Transform } from 'stream'; +import { createReduceStream, createPromiseFromStreams, createListStream } from './index'; + +const promiseFromEvent = (name: string, emitter: Transform) => + new Promise((resolve) => emitter.on(name, () => resolve(name))); + +describe('reduceStream', () => { + test('calls the reducer for each item provided', async () => { + const stub = jest.fn(); + await createPromiseFromStreams([ + createListStream([1, 2, 3]), + createReduceStream((val, chunk, enc) => { + stub(val, chunk, enc); + return chunk; + }, 0), + ]); + expect(stub).toHaveBeenCalledTimes(3); + expect(stub.mock.calls[0]).toEqual([0, 1, 'utf8']); + expect(stub.mock.calls[1]).toEqual([1, 2, 'utf8']); + expect(stub.mock.calls[2]).toEqual([2, 3, 'utf8']); + }); + + test('provides the return value of the last iteration of the reducer', async () => { + const result = await createPromiseFromStreams([ + createListStream('abcdefg'.split('')), + createReduceStream((acc) => acc + 1, 0), + ]); + expect(result).toBe(7); + }); + + test('emits an error if an iteration fails', async () => { + const reduce = createReduceStream((acc, i) => { + expect(i).toBe(1); + return acc; + }, 0); + const errorEvent = promiseFromEvent('error', reduce); + + reduce.write(1); + reduce.write(2); + reduce.resume(); + await errorEvent; + }); + + test('stops calling the reducer if an iteration fails, emits no data', async () => { + const reducer = jest.fn((acc, i) => { + if (i < 100) return acc + i; + else throw new Error(i); + }); + const reduce$ = createReduceStream(reducer, 0); + + const dataStub = jest.fn(); + const errorStub = jest.fn(); + reduce$.on('data', dataStub); + reduce$.on('error', errorStub); + const endEvent = promiseFromEvent('end', reduce$); + + reduce$.write(1); + reduce$.write(2); + reduce$.write(300); + reduce$.write(400); + reduce$.write(1000); + reduce$.end(); + + await endEvent; + expect(reducer).toHaveBeenCalledTimes(3); + expect(dataStub).toHaveBeenCalledTimes(0); + expect(errorStub).toHaveBeenCalledTimes(1); + }); +}); diff --git a/src/core/server/utils/streams/reduce_stream.ts b/src/core/server/utils/streams/reduce_stream.ts new file mode 100644 index 0000000000000..9129df096ad13 --- /dev/null +++ b/src/core/server/utils/streams/reduce_stream.ts @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +/** + * Create a transform stream that consumes each chunk it receives + * and passes it to the reducer, which will return the new value + * for the stream. Once all chunks have been received the reduce + * stream provides the result of final call to the reducer to + * subscribers. 
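Combined with the other helpers in this directory, that makes a reduce stream a convenient way to fold a stream into a single value, mirroring the tests above (a sketch):

```ts
import { createListStream, createReduceStream, createPromiseFromStreams } from './index';

async function sumList() {
  // The reducer runs once per chunk; the final accumulated value is pushed
  // when the source ends and becomes the resolved value of the promise.
  return await createPromiseFromStreams([
    createListStream([1, 2, 3]),
    createReduceStream((acc: number, chunk: number) => acc + chunk, 0),
  ]); // resolves to 6
}
```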
+ * + * @param {Function} + * @param {any} initial Initial value for the stream, if undefined + * then the first chunk provided is used as the + * initial value. + * @return {Transform} + */ +export function createReduceStream( + reducer: (value: any, chunk: T, enc: string) => T, + initial?: T +) { + let i = -1; + let value = initial; + + // if the reducer throws an error then the value is + // considered invalid and the stream will never provide + // it to subscribers. We will also stop calling the + // reducer for any new data that is provided to us + let failed = false; + + if (typeof reducer !== 'function') { + throw new TypeError('reducer must be a function'); + } + + return new Transform({ + readableObjectMode: true, + writableObjectMode: true, + async transform(chunk, enc, callback) { + try { + if (failed) { + return callback(); + } + + i += 1; + if (i === 0 && initial === undefined) { + value = chunk; + } else { + value = await reducer(value, chunk, enc); + } + + callback(); + } catch (err) { + failed = true; + callback(err); + } + }, + + flush(callback) { + if (!failed) { + this.push(value); + } + + callback(); + }, + }); +} diff --git a/src/core/server/utils/streams/replace_stream.test.ts b/src/core/server/utils/streams/replace_stream.test.ts new file mode 100644 index 0000000000000..c9da42395fb85 --- /dev/null +++ b/src/core/server/utils/streams/replace_stream.test.ts @@ -0,0 +1,132 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
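A hedged usage sketch for createReduceStream (not part of this diff; it mirrors the test patterns above, and the wrapper function name is hypothetical):

import { createListStream, createReduceStream, createPromiseFromStreams } from './index';

async function sumExample() {
  // The reduce stream pushes its accumulated value on flush, so the
  // surrounding promise resolves with the final reduction.
  const sum = await createPromiseFromStreams([
    createListStream([1, 2, 3, 4]),
    createReduceStream((acc: number, chunk: number) => acc + chunk, 0),
  ]);
  return sum; // 10
}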
+ */ +import { Writable, Readable } from 'stream'; + +import { + createReplaceStream, + createConcatStream, + createPromiseFromStreams, + createListStream, + createMapStream, +} from './index'; + +async function concatToString(streams: [Readable, ...Writable[]]) { + return await createPromiseFromStreams([ + ...streams, + createMapStream((buff: Buffer) => buff.toString('utf8')), + createConcatStream(''), + ]); +} + +describe('replaceStream', () => { + test('produces buffers when it receives buffers', async () => { + const chunks = await createPromiseFromStreams([ + createListStream([Buffer.from('foo'), Buffer.from('bar')]), + createReplaceStream('o', '0'), + createConcatStream([]), + ]); + + chunks.forEach((chunk) => { + expect(chunk).toBeInstanceOf(Buffer); + }); + }); + + test('produces buffers when it receives strings', async () => { + const chunks = await createPromiseFromStreams([ + createListStream(['foo', 'bar']), + createReplaceStream('o', '0'), + createConcatStream([]), + ]); + + chunks.forEach((chunk) => { + expect(chunk).toBeInstanceOf(Buffer); + }); + }); + + test('expects toReplace to be a string', () => { + // @ts-expect-error + expect(() => createReplaceStream(Buffer.from('foo'))).toThrowError(/be a string/); + }); + + test('replaces multiple single-char instances in a single chunk', async () => { + expect( + await concatToString([ + createListStream([Buffer.from('f00 bar')]), + createReplaceStream('0', 'o'), + ]) + ).toBe('foo bar'); + }); + + test('replaces multiple single-char instances in multiple chunks', async () => { + expect( + await concatToString([ + createListStream([Buffer.from('f0'), Buffer.from('0 bar')]), + createReplaceStream('0', 'o'), + ]) + ).toBe('foo bar'); + }); + + test('replaces single multi-char instances in single chunks', async () => { + expect( + await concatToString([ + createListStream([Buffer.from('f0'), Buffer.from('0 bar')]), + createReplaceStream('0', 'o'), + ]) + ).toBe('foo bar'); + }); + + test('replaces multiple multi-char instances in single chunks', async () => { + expect( + await concatToString([ + createListStream([Buffer.from('foo ba'), Buffer.from('r b'), Buffer.from('az bar')]), + createReplaceStream('bar', '*'), + ]) + ).toBe('foo * baz *'); + }); + + test('replaces multi-char instance that stretches multiple chunks', async () => { + expect( + await concatToString([ + createListStream([ + Buffer.from('foo supe'), + Buffer.from('rcalifra'), + Buffer.from('gilistic'), + Buffer.from('expialid'), + Buffer.from('ocious bar'), + ]), + createReplaceStream('supercalifragilisticexpialidocious', '*'), + ]) + ).toBe('foo * bar'); + }); + + test('ignores missing multi-char instance', async () => { + expect( + await concatToString([ + createListStream([ + Buffer.from('foo supe'), + Buffer.from('rcalifra'), + Buffer.from('gili stic'), + Buffer.from('expialid'), + Buffer.from('ocious bar'), + ]), + createReplaceStream('supercalifragilisticexpialidocious', '*'), + ]) + ).toBe('foo supercalifragili sticexpialidocious bar'); + }); +}); diff --git a/src/core/server/utils/streams/replace_stream.ts b/src/core/server/utils/streams/replace_stream.ts new file mode 100644 index 0000000000000..05391bb3341c2 --- /dev/null +++ b/src/core/server/utils/streams/replace_stream.ts @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +export function createReplaceStream(toReplace: string, replacement: string | Buffer) { + if (typeof toReplace !== 'string') { + throw new TypeError('toReplace must be a string'); + } + + let buffer = Buffer.alloc(0); + return new Transform({ + objectMode: false, + async transform(value, enc, done) { + try { + buffer = Buffer.concat([buffer, value], buffer.length + value.length); + + while (true) { + // try to find the next instance of `toReplace` in buffer + const index = buffer.indexOf(toReplace); + + // if there is no next instance, break + if (index === -1) { + break; + } + + // flush everything to the left of the next instance + // of `toReplace` + this.push(buffer.slice(0, index)); + + // then flush an instance of `replacement` + this.push(replacement); + + // and finally update the buffer to include everything + // to the right of `toReplace`, dropping to replace from the buffer + buffer = buffer.slice(index + toReplace.length); + } + + // until now we have only flushed data that is to the left + // of a discovered instance of `toReplace`. If `toReplace` is + // never found this would lead to us buffering the entire stream. + // + // Instead, we only keep enough buffer to complete a potentially + // partial instance of `toReplace` + if (buffer.length > toReplace.length) { + // the entire buffer except the last `toReplace.length` bytes + // so that if all but one byte from `toReplace` is in the buffer, + // and the next chunk delivers the necessary byte, the buffer will then + // contain a complete `toReplace` token. + this.push(buffer.slice(0, buffer.length - toReplace.length)); + buffer = buffer.slice(-toReplace.length); + } + + done(); + } catch (err) { + done(err); + } + }, + + flush(callback) { + if (buffer.length) { + this.push(buffer); + } + + // @ts-expect-error + buffer = null; + callback(); + }, + }); +} diff --git a/src/core/server/utils/streams/split_stream.test.ts b/src/core/server/utils/streams/split_stream.test.ts new file mode 100644 index 0000000000000..f131bd0661e54 --- /dev/null +++ b/src/core/server/utils/streams/split_stream.test.ts @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
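A hedged sketch of createReplaceStream in a pipeline (illustrative only; it follows the same shape as the tests above, including a replacement token that straddles two chunks, and the wrapper function name is hypothetical):

import {
  createListStream,
  createReplaceStream,
  createMapStream,
  createConcatStream,
  createPromiseFromStreams,
} from './index';

async function replaceExample() {
  const replaced = await createPromiseFromStreams([
    createListStream([Buffer.from('foo ba'), Buffer.from('r baz')]), // 'bar' spans both chunks
    createReplaceStream('bar', '*'),
    createMapStream((chunk: Buffer) => chunk.toString('utf8')),
    createConcatStream(''),
  ]);
  return replaced; // 'foo * baz'
}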
+ */ +import { Transform } from 'stream'; +import { createSplitStream, createConcatStream, createPromiseFromStreams } from './index'; + +async function split(stream: Transform, input: Array) { + const concat = createConcatStream(); + concat.write([]); + stream.pipe(concat); + const output = createPromiseFromStreams([concat]); + + input.forEach((i: any) => { + stream.write(i); + }); + stream.end(); + + return await output; +} + +describe('splitStream', () => { + test('splits buffers, produces strings', async () => { + const output = await split(createSplitStream('&'), [Buffer.from('foo&bar')]); + expect(output).toEqual(['foo', 'bar']); + }); + + test('supports mixed input', async () => { + const output = await split(createSplitStream('&'), [Buffer.from('foo&b'), 'ar']); + expect(output).toEqual(['foo', 'bar']); + }); + + test('supports buffer split chunks', async () => { + const output = await split(createSplitStream(Buffer.from('&')), ['foo&b', 'ar']); + expect(output).toEqual(['foo', 'bar']); + }); + + test('splits provided values by a delimiter', async () => { + const output = await split(createSplitStream('&'), ['foo&b', 'ar']); + expect(output).toEqual(['foo', 'bar']); + }); + + test('handles multi-character delimiters', async () => { + const output = await split(createSplitStream('oo'), ['foo&b', 'ar']); + expect(output).toEqual(['f', '&bar']); + }); + + test('handles delimiters that span multiple chunks', async () => { + const output = await split(createSplitStream('ba'), ['foo&b', 'ar']); + expect(output).toEqual(['foo&', 'r']); + }); + + test('produces an empty chunk if the split char is at the end of the input', async () => { + const output = await split(createSplitStream('&bar'), ['foo&b', 'ar']); + expect(output).toEqual(['foo', '']); + }); +}); diff --git a/src/core/server/utils/streams/split_stream.ts b/src/core/server/utils/streams/split_stream.ts new file mode 100644 index 0000000000000..ae820f60abbf6 --- /dev/null +++ b/src/core/server/utils/streams/split_stream.ts @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Transform } from 'stream'; + +/** + * Creates a Transform stream that consumes a stream of Buffers + * and produces a stream of strings (in object mode) by splitting + * the received bytes using the splitChunk. 
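A hedged sketch of the createSplitStream behavior described here (illustrative only, not part of this diff; the delimiter may span chunk boundaries, as the tests verify, and the wrapper function name is hypothetical):

import {
  createListStream,
  createSplitStream,
  createConcatStream,
  createPromiseFromStreams,
} from './index';

async function splitExample() {
  const parts = await createPromiseFromStreams([
    createListStream([Buffer.from('foo&b'), Buffer.from('ar&baz')]),
    createSplitStream('&'),
    createConcatStream([]),
  ]);
  return parts; // ['foo', 'bar', 'baz'] -- strings, emitted in object mode
}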
+ * + * Ways this is behaves like String#split: + * - instances of splitChunk are removed from the input + * - splitChunk can be on any size + * - if there are no bytes found after the last splitChunk + * a final empty chunk is emitted + * + * Ways this deviates from String#split: + * - splitChunk cannot be a regexp + * - an empty string or Buffer will not produce a stream of individual + * bytes like `string.split('')` would + * + * @param {String} splitChunk + * @return {Transform} + */ +export function createSplitStream(splitChunk: string | Uint8Array) { + let unsplitBuffer = Buffer.alloc(0); + + return new Transform({ + writableObjectMode: false, + readableObjectMode: true, + transform(chunk, enc, callback) { + try { + let i; + let toSplit = Buffer.concat([unsplitBuffer, chunk]); + while ((i = toSplit.indexOf(splitChunk)) !== -1) { + const slice = toSplit.slice(0, i); + toSplit = toSplit.slice(i + splitChunk.length); + this.push(slice.toString('utf8')); + } + + unsplitBuffer = toSplit; + callback(); + } catch (err) { + callback(err); + } + }, + + flush(callback) { + try { + this.push(unsplitBuffer.toString('utf8')); + + callback(); + } catch (err) { + callback(err); + } + }, + }); +} diff --git a/src/test_utils/public/http_test_setup.ts b/src/core/test_helpers/http_test_setup.ts similarity index 85% rename from src/test_utils/public/http_test_setup.ts rename to src/core/test_helpers/http_test_setup.ts index 7c70f64887af1..50ea43fb22b5e 100644 --- a/src/test_utils/public/http_test_setup.ts +++ b/src/core/test_helpers/http_test_setup.ts @@ -17,9 +17,9 @@ * under the License. */ -import { HttpService } from '../../core/public/http'; -import { fatalErrorsServiceMock } from '../../core/public/fatal_errors/fatal_errors_service.mock'; -import { injectedMetadataServiceMock } from '../../core/public/injected_metadata/injected_metadata_service.mock'; +import { HttpService } from '../public/http'; +import { fatalErrorsServiceMock } from '../public/fatal_errors/fatal_errors_service.mock'; +import { injectedMetadataServiceMock } from '../public/injected_metadata/injected_metadata_service.mock'; export type SetupTap = ( injectedMetadata: ReturnType, diff --git a/src/test_utils/kbn_server.ts b/src/core/test_helpers/kbn_server.ts similarity index 95% rename from src/test_utils/kbn_server.ts rename to src/core/test_helpers/kbn_server.ts index e44ce0de403d9..488c4b919d3e4 100644 --- a/src/test_utils/kbn_server.ts +++ b/src/core/test_helpers/kbn_server.ts @@ -26,16 +26,17 @@ import { kibanaServerTestUser, kibanaTestUser, setupUsers, - // @ts-ignore: implicit any for JS file } from '@kbn/test'; import { defaultsDeep, get } from 'lodash'; import { resolve } from 'path'; import { BehaviorSubject } from 'rxjs'; import supertest from 'supertest'; -import { LegacyAPICaller } from '../core/server'; -import { CliArgs, Env } from '../core/server/config'; -import { Root } from '../core/server/root'; -import KbnServer from '../legacy/server/kbn_server'; + +import { CoreStart } from 'src/core/server'; +import { LegacyAPICaller } from '../server/elasticsearch'; +import { CliArgs, Env } from '../server/config'; +import { Root } from '../server/root'; +import KbnServer from '../../legacy/server/kbn_server'; export type HttpMethod = 'delete' | 'get' | 'head' | 'post' | 'put'; @@ -53,7 +54,7 @@ const DEFAULTS_SETTINGS = { }; const DEFAULT_SETTINGS_WITH_CORE_PLUGINS = { - plugins: { scanDirs: [resolve(__dirname, '../legacy/core_plugins')] }, + plugins: { scanDirs: [resolve(__dirname, '../../legacy/core_plugins')] }, 
elasticsearch: { hosts: [esTestConfig.getUrl()], username: kibanaServerTestUser.username, @@ -170,6 +171,7 @@ export interface TestElasticsearchUtils { export interface TestKibanaUtils { root: Root; + coreStart: CoreStart; kbnServer: KbnServer; stop: () => Promise; } @@ -289,13 +291,14 @@ export function createTestServers({ const root = createRootWithCorePlugins(kbnSettings); await root.setup(); - await root.start(); + const coreStart = await root.start(); const kbnServer = getKbnServer(root); return { root, kbnServer, + coreStart, stop: async () => await root.shutdown(), }; }, diff --git a/src/dev/build/lib/watch_stdio_for_line.ts b/src/dev/build/lib/watch_stdio_for_line.ts index 2322d017abc61..3d7929ccfc33a 100644 --- a/src/dev/build/lib/watch_stdio_for_line.ts +++ b/src/dev/build/lib/watch_stdio_for_line.ts @@ -24,7 +24,7 @@ import { createPromiseFromStreams, createSplitStream, createMapStream, -} from '../../../legacy/utils/streams'; +} from '../../../core/server/utils'; // creates a stream that skips empty lines unless they are followed by // another line, preventing the empty lines produced by splitStream diff --git a/src/dev/build/tasks/create_archives_task.ts b/src/dev/build/tasks/create_archives_task.ts index 0083881e9f748..a05e383394ecf 100644 --- a/src/dev/build/tasks/create_archives_task.ts +++ b/src/dev/build/tasks/create_archives_task.ts @@ -92,8 +92,8 @@ export const CreateArchives: Task = { }); metrics.push({ - group: `${build.isOss() ? 'oss ' : ''}distributable file count`, - id: 'total', + group: 'distributable file count', + id: build.isOss() ? 'oss' : 'default', value: fileCount, }); } diff --git a/src/dev/jest/config.js b/src/dev/jest/config.js index 74e1ec5e2b4ed..486c8563c5456 100644 --- a/src/dev/jest/config.js +++ b/src/dev/jest/config.js @@ -59,7 +59,6 @@ export default { '@elastic/eui/lib/(.*)?': '/node_modules/@elastic/eui/test-env/$1', '^src/plugins/(.*)': '/src/plugins/$1', '^plugins/([^/.]*)(.*)': '/src/legacy/core_plugins/$1/public$2', - '^ui/(.*)': '/src/legacy/ui/public/$1', '^uiExports/(.*)': '/src/dev/jest/mocks/file_mock.js', '^test_utils/(.*)': '/src/test_utils/public/$1', '^fixtures/(.*)': '/src/fixtures/$1', diff --git a/src/dev/jest/setup/mocks.js b/src/dev/jest/setup/mocks.js index 6e7160e858cd7..cea28d8abdbd1 100644 --- a/src/dev/jest/setup/mocks.js +++ b/src/dev/jest/setup/mocks.js @@ -34,10 +34,6 @@ * The mocks that are enabled that way live inside the `__mocks__` folders beside their implementation files. */ -jest.mock('ui/metadata'); -jest.mock('ui/documentation_links/documentation_links'); -jest.mock('ui/chrome'); - jest.mock('moment-timezone', () => { // We always want to mock the timezone moment-timezone guesses, since otherwise // test results might be depending on which time zone you are running them. diff --git a/src/dev/notice/generate_notice_from_source.ts b/src/dev/notice/generate_notice_from_source.ts index 0bef5bc5f32d4..9f7eb9d9e1aa4 100644 --- a/src/dev/notice/generate_notice_from_source.ts +++ b/src/dev/notice/generate_notice_from_source.ts @@ -41,7 +41,7 @@ interface Options { * into the repository. 
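A hedged sketch of how a test can consume the new coreStart value returned from root.start() (illustrative only; it assumes the existing createRootWithCorePlugins export from the renamed src/core/test_helpers/kbn_server module, and the wrapper function name is hypothetical):

import { createRootWithCorePlugins } from 'src/core/test_helpers/kbn_server';

async function startTestKibana(settings: Record<string, unknown>) {
  const root = createRootWithCorePlugins(settings);
  await root.setup();
  const coreStart = await root.start(); // root.start() now resolves to CoreStart
  // coreStart exposes the started core services (e.g. savedObjects, uiSettings)
  // without reaching through kbnServer internals.
  return { root, coreStart, stop: () => root.shutdown() };
}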
*/ export async function generateNoticeFromSource({ productName, directory, log }: Options) { - const globs = ['**/*.{js,less,css,ts}']; + const globs = ['**/*.{js,less,css,ts,tsx}']; const options = { cwd: directory, diff --git a/src/dev/run_i18n_integrate.ts b/src/dev/run_i18n_integrate.ts index 25c3ea32783aa..c0b2302c91c54 100644 --- a/src/dev/run_i18n_integrate.ts +++ b/src/dev/run_i18n_integrate.ts @@ -111,6 +111,7 @@ run( const reporter = new ErrorReporter(); const messages: Map = new Map(); await list.run({ messages, reporter }); + process.exitCode = 0; } catch (error) { process.exitCode = 1; if (error instanceof ErrorReporter) { @@ -120,6 +121,7 @@ run( log.error(error); } } + process.exit(); }, { flags: { diff --git a/src/dev/typescript/build_refs.ts b/src/dev/typescript/build_refs.ts new file mode 100644 index 0000000000000..cbb596c185f8b --- /dev/null +++ b/src/dev/typescript/build_refs.ts @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import execa from 'execa'; +import { run, ToolingLog } from '@kbn/dev-utils'; + +export async function buildRefs(log: ToolingLog) { + try { + log.info('Building TypeScript projects refs...'); + await execa(require.resolve('typescript/bin/tsc'), ['-b', 'tsconfig.refs.json']); + } catch (e) { + log.error(e); + process.exit(1); + } +} + +export async function runBuildRefs() { + run(async ({ log }) => { + await buildRefs(log); + }); +} diff --git a/src/dev/typescript/projects.ts b/src/dev/typescript/projects.ts index 065321e355256..e18c82b5b9e96 100644 --- a/src/dev/typescript/projects.ts +++ b/src/dev/typescript/projects.ts @@ -24,6 +24,7 @@ import { Project } from './project'; export const PROJECTS = [ new Project(resolve(REPO_ROOT, 'tsconfig.json')), + new Project(resolve(REPO_ROOT, 'src/test_utils/tsconfig.json')), new Project(resolve(REPO_ROOT, 'test/tsconfig.json'), { name: 'kibana/test' }), new Project(resolve(REPO_ROOT, 'x-pack/tsconfig.json')), new Project(resolve(REPO_ROOT, 'x-pack/test/tsconfig.json'), { name: 'x-pack/test' }), diff --git a/src/dev/typescript/run_type_check_cli.ts b/src/dev/typescript/run_type_check_cli.ts index 9eeaeb4da7042..e1fca23274a5a 100644 --- a/src/dev/typescript/run_type_check_cli.ts +++ b/src/dev/typescript/run_type_check_cli.ts @@ -24,8 +24,9 @@ import getopts from 'getopts'; import { execInProjects } from './exec_in_projects'; import { filterProjectsByFlag } from './projects'; +import { buildRefs } from './build_refs'; -export function runTypeCheckCli() { +export async function runTypeCheckCli() { const extraFlags: string[] = []; const opts = getopts(process.argv.slice(2), { boolean: ['skip-lib-check', 'help'], @@ -79,7 +80,16 @@ export function runTypeCheckCli() { process.exit(); } - const tscArgs = ['--noEmit', '--pretty', 
...(opts['skip-lib-check'] ? ['--skipLibCheck'] : [])]; + await buildRefs(log); + + const tscArgs = [ + // composite project cannot be used with --noEmit + ...['--composite', 'false'], + ...['--emitDeclarationOnly', 'false'], + '--noEmit', + '--pretty', + ...(opts['skip-lib-check'] ? ['--skipLibCheck'] : []), + ]; const projects = filterProjectsByFlag(opts.project).filter((p) => !p.disableTypeCheck); if (!projects.length) { diff --git a/src/fixtures/stubbed_logstash_index_pattern.js b/src/fixtures/stubbed_logstash_index_pattern.js index 5bb926799fcf6..5735b01eb3db4 100644 --- a/src/fixtures/stubbed_logstash_index_pattern.js +++ b/src/fixtures/stubbed_logstash_index_pattern.js @@ -21,7 +21,12 @@ import StubIndexPattern from 'test_utils/stub_index_pattern'; import stubbedLogstashFields from 'fixtures/logstash_fields'; import { getKbnFieldType } from '../plugins/data/common'; -import { npSetup } from '../legacy/ui/public/new_platform/new_platform.karma_mock'; +import { uiSettingsServiceMock } from '../core/public/ui_settings/ui_settings_service.mock'; + +const uiSettingSetupMock = uiSettingsServiceMock.createSetupContract(); +uiSettingSetupMock.get.mockImplementation((item, defaultValue) => { + return defaultValue; +}); export default function stubbedLogstashIndexPatternService() { const mockLogstashFields = stubbedLogstashFields(); @@ -41,13 +46,9 @@ export default function stubbedLogstashIndexPatternService() { }; }); - const indexPattern = new StubIndexPattern( - 'logstash-*', - (cfg) => cfg, - 'time', - fields, - npSetup.core - ); + const indexPattern = new StubIndexPattern('logstash-*', (cfg) => cfg, 'time', fields, { + uiSettings: uiSettingSetupMock, + }); indexPattern.id = 'logstash-*'; indexPattern.isTimeNanosBased = () => false; diff --git a/src/fixtures/stubbed_saved_object_index_pattern.ts b/src/fixtures/stubbed_saved_object_index_pattern.ts index 02e6cb85e341f..44b391f14cf9c 100644 --- a/src/fixtures/stubbed_saved_object_index_pattern.ts +++ b/src/fixtures/stubbed_saved_object_index_pattern.ts @@ -30,6 +30,7 @@ export function stubbedSavedObjectIndexPattern(id: string | null = null) { timeFieldName: 'timestamp', customFormats: '{}', fields: mockLogstashFields, + title: 'title', }, version: 2, }; diff --git a/src/legacy/core_plugins/elasticsearch/index.js b/src/legacy/core_plugins/elasticsearch/index.js index 599886788604b..f90f490d68035 100644 --- a/src/legacy/core_plugins/elasticsearch/index.js +++ b/src/legacy/core_plugins/elasticsearch/index.js @@ -16,18 +16,13 @@ * specific language governing permissions and limitations * under the License. 
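In effect, the type check CLI now runs in two phases; a rough sketch (illustrative only, and it assumes execInProjects supplies each project's tsconfig to tsc):

// 1. buildRefs(log)  ->  tsc -b tsconfig.refs.json   (build composite project references first)
// 2. per project     ->  tsc --composite false --emitDeclarationOnly false --noEmit --pretty [--skipLibCheck]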
*/ -import { first } from 'rxjs/operators'; import { Cluster } from './server/lib/cluster'; import { createProxy } from './server/lib/create_proxy'; export default function (kibana) { - let defaultVars; - return new kibana.Plugin({ require: [], - uiExports: { injectDefaultVars: () => defaultVars }, - async init(server) { // All methods that ES plugin exposes are synchronous so we should get the first // value from all observables here to be able to synchronously return and create @@ -36,16 +31,6 @@ export default function (kibana) { const adminCluster = new Cluster(client); const dataCluster = new Cluster(client); - const esConfig = await server.newPlatform.__internals.elasticsearch.legacy.config$ - .pipe(first()) - .toPromise(); - - defaultVars = { - esRequestTimeout: esConfig.requestTimeout.asMilliseconds(), - esShardTimeout: esConfig.shardTimeout.asMilliseconds(), - esApiVersion: esConfig.apiVersion, - }; - const clusters = new Map(); server.expose('getCluster', (name) => { if (name === 'admin') { diff --git a/src/legacy/core_plugins/kibana/index.js b/src/legacy/core_plugins/kibana/index.js deleted file mode 100644 index 722d75d00f78f..0000000000000 --- a/src/legacy/core_plugins/kibana/index.js +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { getUiSettingDefaults } from './server/ui_setting_defaults'; - -export default function (kibana) { - return new kibana.Plugin({ - id: 'kibana', - config: function (Joi) { - return Joi.object({ - enabled: Joi.boolean().default(true), - index: Joi.string().default('.kibana'), - autocompleteTerminateAfter: Joi.number().integer().min(1).default(100000), - // TODO Also allow units here like in elasticsearch config once this is moved to the new platform - autocompleteTimeout: Joi.number().integer().min(1).default(1000), - }).default(); - }, - - uiExports: { - uiSettingDefaults: getUiSettingDefaults(), - }, - }); -} diff --git a/src/legacy/core_plugins/kibana/package.json b/src/legacy/core_plugins/kibana/package.json deleted file mode 100644 index 94db646611df0..0000000000000 --- a/src/legacy/core_plugins/kibana/package.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "name": "kibana", - "version": "kibana", - "config": { - "@elastic/eslint-import-resolver-kibana": { - "projectRoot": false - } - } -} diff --git a/src/legacy/core_plugins/kibana/public/index.scss b/src/legacy/core_plugins/kibana/public/index.scss deleted file mode 100644 index 7de0c8fc15f94..0000000000000 --- a/src/legacy/core_plugins/kibana/public/index.scss +++ /dev/null @@ -1,7 +0,0 @@ -// Elastic charts -@import '@elastic/charts/dist/theme'; -@import '@elastic/eui/src/themes/charts/theme'; - -// Public UI styles -@import 'src/legacy/ui/public/index'; - diff --git a/src/legacy/core_plugins/kibana/server/ui_setting_defaults.js b/src/legacy/core_plugins/kibana/server/ui_setting_defaults.js deleted file mode 100644 index 7de5fb581643a..0000000000000 --- a/src/legacy/core_plugins/kibana/server/ui_setting_defaults.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export function getUiSettingDefaults() { - // wrapped in provider so that a new instance is given to each app/test - return {}; -} diff --git a/src/legacy/deprecation/__tests__/create_transform.js b/src/legacy/deprecation/__tests__/create_transform.js deleted file mode 100644 index d3838da5c3399..0000000000000 --- a/src/legacy/deprecation/__tests__/create_transform.js +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { createTransform } from '../create_transform'; -import expect from '@kbn/expect'; -import sinon from 'sinon'; - -describe('deprecation', function () { - describe('createTransform', function () { - it(`doesn't modify settings parameter`, function () { - const settings = { - original: true, - }; - const deprecations = [ - (settings) => { - settings.original = false; - }, - ]; - createTransform(deprecations)(settings); - expect(settings.original).to.be(true); - }); - - it('calls single deprecation in array', function () { - const deprecations = [sinon.spy()]; - createTransform(deprecations)({}); - expect(deprecations[0].calledOnce).to.be(true); - }); - - it('calls multiple deprecations in array', function () { - const deprecations = [sinon.spy(), sinon.spy()]; - createTransform(deprecations)({}); - expect(deprecations[0].calledOnce).to.be(true); - expect(deprecations[1].calledOnce).to.be(true); - }); - - it('passes log function to deprecation', function () { - const deprecation = sinon.spy(); - const log = function () {}; - createTransform([deprecation])({}, log); - expect(deprecation.args[0][1]).to.be(log); - }); - }); -}); diff --git a/src/legacy/deprecation/create_transform.js b/src/legacy/deprecation/create_transform.js deleted file mode 100644 index 72e8e153ed819..0000000000000 --- a/src/legacy/deprecation/create_transform.js +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { deepCloneWithBuffers as clone } from '../utils'; -import { forEach, noop } from 'lodash'; - -export function createTransform(deprecations) { - return (settings, log = noop) => { - const result = clone(settings); - - forEach(deprecations, (deprecation) => { - deprecation(result, log); - }); - - return result; - }; -} diff --git a/src/legacy/deprecation/deprecations/__tests__/rename.js b/src/legacy/deprecation/deprecations/__tests__/rename.js deleted file mode 100644 index 47c6b3257ff69..0000000000000 --- a/src/legacy/deprecation/deprecations/__tests__/rename.js +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; -import { rename } from '../rename'; -import sinon from 'sinon'; - -describe('deprecation/deprecations', function () { - describe('rename', function () { - it('should rename simple property', function () { - const value = 'value'; - const settings = { - before: value, - }; - - rename('before', 'after')(settings); - expect(settings.before).to.be(undefined); - expect(settings.after).to.be(value); - }); - - it('should rename nested property', function () { - const value = 'value'; - const settings = { - someObject: { - before: value, - }, - }; - - rename('someObject.before', 'someObject.after')(settings); - expect(settings.someObject.before).to.be(undefined); - expect(settings.someObject.after).to.be(value); - }); - - it('should rename property, even when the value is null', function () { - const value = null; - const settings = { - before: value, - }; - - rename('before', 'after')(settings); - expect(settings.before).to.be(undefined); - expect(settings.after).to.be(null); - }); - - it(`shouldn't log when a rename doesn't occur`, function () { - const settings = { - exists: true, - }; - - const log = sinon.spy(); - rename('doesntExist', 'alsoDoesntExist')(settings, log); - expect(log.called).to.be(false); - }); - - it('should log when a rename does occur', function () { - const settings = { - exists: true, - }; - - const log = sinon.spy(); - rename('exists', 'alsoExists')(settings, log); - - expect(log.calledOnce).to.be(true); - expect(log.args[0][0]).to.match(/exists.+deprecated/); - }); - }); -}); diff --git a/src/legacy/deprecation/deprecations/__tests__/unused.js b/src/legacy/deprecation/deprecations/__tests__/unused.js deleted file mode 100644 index 4907c2b166989..0000000000000 --- a/src/legacy/deprecation/deprecations/__tests__/unused.js +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import expect from '@kbn/expect'; -import sinon from 'sinon'; -import { unused } from '../unused'; - -describe('deprecation/deprecations', function () { - describe('unused', function () { - it('should remove unused setting', function () { - const settings = { - old: true, - }; - - unused('old')(settings); - expect(settings.old).to.be(undefined); - }); - - it(`shouldn't remove used setting`, function () { - const value = 'value'; - const settings = { - new: value, - }; - - unused('old')(settings); - expect(settings.new).to.be(value); - }); - - it('should remove unused setting, even when null', function () { - const settings = { - old: null, - }; - - unused('old')(settings); - expect(settings.old).to.be(undefined); - }); - - it('should log when removing unused setting', function () { - const settings = { - old: true, - }; - - const log = sinon.spy(); - unused('old')(settings, log); - - expect(log.calledOnce).to.be(true); - expect(log.args[0][0]).to.match(/old.+deprecated/); - }); - - it(`shouldn't log when no setting is unused`, function () { - const settings = { - new: true, - }; - - const log = sinon.spy(); - unused('old')(settings, log); - expect(log.called).to.be(false); - }); - }); -}); diff --git a/src/legacy/deprecation/deprecations/index.js b/src/legacy/deprecation/deprecations/index.js deleted file mode 100644 index 527c99309ba80..0000000000000 --- a/src/legacy/deprecation/deprecations/index.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { rename } from './rename'; -export { unused } from './unused'; diff --git a/src/legacy/deprecation/deprecations/rename.js b/src/legacy/deprecation/deprecations/rename.js deleted file mode 100644 index c96b9146b4e2c..0000000000000 --- a/src/legacy/deprecation/deprecations/rename.js +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { set } from '@elastic/safer-lodash-set'; -import { get, isUndefined, noop } from 'lodash'; -import { unset } from '../../utils'; - -export function rename(oldKey, newKey) { - return (settings, log = noop) => { - const value = get(settings, oldKey); - if (isUndefined(value)) { - return; - } - - unset(settings, oldKey); - set(settings, newKey, value); - - log(`Config key "${oldKey}" is deprecated. It has been replaced with "${newKey}"`); - }; -} diff --git a/src/legacy/deprecation/deprecations/unused.js b/src/legacy/deprecation/deprecations/unused.js deleted file mode 100644 index 4291063dc482b..0000000000000 --- a/src/legacy/deprecation/deprecations/unused.js +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { get, isUndefined, noop } from 'lodash'; -import { unset } from '../../utils'; - -export function unused(oldKey) { - return (settings, log = noop) => { - const value = get(settings, oldKey); - if (isUndefined(value)) { - return; - } - - unset(settings, oldKey); - log(`${oldKey} is deprecated and is no longer used`); - }; -} diff --git a/src/legacy/deprecation/get_transform.js b/src/legacy/deprecation/get_transform.js deleted file mode 100644 index bf286901af62c..0000000000000 --- a/src/legacy/deprecation/get_transform.js +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { noop } from 'lodash'; - -import { createTransform } from './create_transform'; -import { rename, unused } from './deprecations'; - -export async function getTransform(spec) { - const deprecationsProvider = spec.getDeprecationsProvider() || noop; - if (!deprecationsProvider) return; - const transforms = (await deprecationsProvider({ rename, unused })) || []; - return createTransform(transforms); -} diff --git a/src/legacy/deprecation/index.js b/src/legacy/deprecation/index.js deleted file mode 100644 index 787563e7353ce..0000000000000 --- a/src/legacy/deprecation/index.js +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { rename, unused } from './deprecations'; - -export { createTransform } from './create_transform'; -export { getTransform } from './get_transform'; -export const Deprecations = { rename, unused }; diff --git a/src/legacy/plugin_discovery/plugin_config/__tests__/extend_config_service.js b/src/legacy/plugin_discovery/plugin_config/__tests__/extend_config_service.js index a74bfb872e99c..40f84f6f54b3b 100644 --- a/src/legacy/plugin_discovery/plugin_config/__tests__/extend_config_service.js +++ b/src/legacy/plugin_discovery/plugin_config/__tests__/extend_config_service.js @@ -45,10 +45,6 @@ describe('plugin discovery/extend config service', () => { enabled: Joi.boolean().default(true), test: Joi.string().default('bonk'), }).default(), - - deprecations({ rename }) { - return [rename('oldTest', 'test')]; - }, }), }) .getPluginSpecs() @@ -74,16 +70,14 @@ describe('plugin discovery/extend config service', () => { getConfigPrefix: sandbox.stub().returns(configPrefix), }; - const logDeprecation = sandbox.stub(); - const getSettings = sandbox.stub(SettingsNS, 'getSettings').returns(rootSettings.foo.bar); const getSchema = sandbox.stub(SchemaNS, 'getSchema').returns(schema); - await extendConfigService(pluginSpec, config, rootSettings, logDeprecation); + await extendConfigService(pluginSpec, config, rootSettings); sinon.assert.calledOnce(getSettings); - sinon.assert.calledWithExactly(getSettings, pluginSpec, rootSettings, logDeprecation); + sinon.assert.calledWithExactly(getSettings, pluginSpec, rootSettings); sinon.assert.calledOnce(getSchema); sinon.assert.calledWithExactly(getSchema, pluginSpec); @@ -145,41 +139,6 @@ describe('plugin discovery/extend config service', () => { expect(error.message).to.contain('"test" must be a string'); } }); - - it('calls logDeprecation() with deprecation messages', async () => { - const config = Config.withDefaultSchema(); - const logDeprecation = sinon.stub(); - await extendConfigService( - pluginSpec, - config, - { - foo: { - bar: { - baz: { - oldTest: '123', - }, - }, - }, - }, - logDeprecation - ); - sinon.assert.calledOnce(logDeprecation); 
- sinon.assert.calledWithExactly(logDeprecation, sinon.match('"oldTest" is deprecated')); - }); - - it('uses settings after transforming deprecations', async () => { - const config = Config.withDefaultSchema(); - await extendConfigService(pluginSpec, config, { - foo: { - bar: { - baz: { - oldTest: '123', - }, - }, - }, - }); - expect(config.get('foo.bar.baz.test')).to.be('123'); - }); }); describe('disableConfigExtension()', () => { diff --git a/src/legacy/plugin_discovery/plugin_config/__tests__/settings.js b/src/legacy/plugin_discovery/plugin_config/__tests__/settings.js index 2a26e29dfd63a..750c5ee6c6f50 100644 --- a/src/legacy/plugin_discovery/plugin_config/__tests__/settings.js +++ b/src/legacy/plugin_discovery/plugin_config/__tests__/settings.js @@ -18,7 +18,6 @@ */ import expect from '@kbn/expect'; -import sinon from 'sinon'; import { PluginPack } from '../../plugin_pack'; import { getSettings } from '../settings'; @@ -33,7 +32,6 @@ describe('plugin_discovery/settings', () => { provider: ({ Plugin }) => new Plugin({ configPrefix: 'a.b.c', - deprecations: ({ rename }) => [rename('foo', 'bar')], }), }) .getPluginSpecs() @@ -59,28 +57,5 @@ describe('plugin_discovery/settings', () => { it('allows rootSettings to be undefined', async () => { expect(await getSettings(pluginSpec)).to.eql(undefined); }); - - it('resolves deprecations', async () => { - const logDeprecation = sinon.stub(); - expect( - await getSettings( - pluginSpec, - { - a: { - b: { - c: { - foo: true, - }, - }, - }, - }, - logDeprecation - ) - ).to.eql({ - bar: true, - }); - - sinon.assert.calledOnce(logDeprecation); - }); }); }); diff --git a/src/legacy/plugin_discovery/plugin_config/extend_config_service.js b/src/legacy/plugin_discovery/plugin_config/extend_config_service.js index 9257227c0d62b..a6d5d4ae5f990 100644 --- a/src/legacy/plugin_discovery/plugin_config/extend_config_service.js +++ b/src/legacy/plugin_discovery/plugin_config/extend_config_service.js @@ -30,8 +30,8 @@ import { getSchema, getStubSchema } from './schema'; * @param {Function} [logDeprecation] * @return {Promise} */ -export async function extendConfigService(spec, config, rootSettings, logDeprecation) { - const settings = await getSettings(spec, rootSettings, logDeprecation); +export async function extendConfigService(spec, config, rootSettings) { + const settings = await getSettings(spec, rootSettings); const schema = await getSchema(spec); config.extendSchema(schema, settings, spec.getConfigPrefix()); } diff --git a/src/legacy/plugin_discovery/plugin_config/settings.js b/src/legacy/plugin_discovery/plugin_config/settings.js index 44ecb5718fe21..e6a4741d76eca 100644 --- a/src/legacy/plugin_discovery/plugin_config/settings.js +++ b/src/legacy/plugin_discovery/plugin_config/settings.js @@ -19,20 +19,16 @@ import { get } from 'lodash'; -import { getTransform } from '../../deprecation'; - /** * Get the settings for a pluginSpec from the raw root settings while * optionally calling logDeprecation() with warnings about deprecated * settings that were used * @param {PluginSpec} spec * @param {Object} rootSettings - * @param {Function} [logDeprecation] * @return {Promise} */ -export async function getSettings(spec, rootSettings, logDeprecation) { +export async function getSettings(spec, rootSettings) { const prefix = spec.getConfigPrefix(); const rawSettings = get(rootSettings, prefix); - const transform = await getTransform(spec); - return transform(rawSettings, logDeprecation); + return rawSettings; } diff --git 
a/src/legacy/plugin_discovery/plugin_spec/plugin_spec_options.d.ts b/src/legacy/plugin_discovery/plugin_spec/plugin_spec_options.d.ts index e51a355cbc8d2..e1ed2f57375a4 100644 --- a/src/legacy/plugin_discovery/plugin_spec/plugin_spec_options.d.ts +++ b/src/legacy/plugin_discovery/plugin_spec/plugin_spec_options.d.ts @@ -18,14 +18,10 @@ */ import { Server } from '../../server/kbn_server'; import { Capabilities } from '../../../core/server'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { SavedObjectsLegacyManagementDefinition } from '../../../core/server/saved_objects/types'; export type InitPluginFunction = (server: Server) => void; export interface UiExports { injectDefaultVars?: (server: Server) => { [key: string]: any }; - savedObjectsManagement?: SavedObjectsLegacyManagementDefinition; - mappings?: unknown; } export interface PluginSpecOptions { diff --git a/src/legacy/plugin_discovery/types.ts b/src/legacy/plugin_discovery/types.ts index 283806f69599a..700ca6fa68c95 100644 --- a/src/legacy/plugin_discovery/types.ts +++ b/src/legacy/plugin_discovery/types.ts @@ -19,11 +19,6 @@ import { Server } from '../server/kbn_server'; import { Capabilities } from '../../core/server'; -// Disable lint errors for imports from src/core/* until SavedObjects migration is complete -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { SavedObjectsSchemaDefinition } from '../../core/server/saved_objects/schema'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { SavedObjectsLegacyManagementDefinition } from '../../core/server/saved_objects/types'; import { AppCategory } from '../../core/types'; /** @@ -70,8 +65,6 @@ export interface LegacyPluginOptions { home: string[]; mappings: any; migrations: any; - savedObjectSchemas: SavedObjectsSchemaDefinition; - savedObjectsManagement: SavedObjectsLegacyManagementDefinition; visTypes: string[]; embeddableActions?: string[]; embeddableFactories?: string[]; diff --git a/src/legacy/server/capabilities/capabilities_mixin.test.ts b/src/legacy/server/capabilities/capabilities_mixin.test.ts deleted file mode 100644 index 3422d6a8cbb34..0000000000000 --- a/src/legacy/server/capabilities/capabilities_mixin.test.ts +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { Server } from 'hapi'; -import KbnServer from '../kbn_server'; - -import { capabilitiesMixin } from './capabilities_mixin'; - -describe('capabilitiesMixin', () => { - let registerMock: jest.Mock; - - const getKbnServer = (pluginSpecs: any[] = []) => { - return ({ - afterPluginsInit: (callback: () => void) => callback(), - pluginSpecs, - newPlatform: { - setup: { - core: { - capabilities: { - registerProvider: registerMock, - }, - }, - }, - }, - } as unknown) as KbnServer; - }; - - let server: Server; - beforeEach(() => { - server = new Server(); - server.getUiNavLinks = () => []; - registerMock = jest.fn(); - }); - - it('calls capabilities#registerCapabilitiesProvider for each legacy plugin specs', async () => { - const getPluginSpec = (provider: () => any) => ({ - getUiCapabilitiesProvider: () => provider, - }); - - const capaA = { catalogue: { A: true } }; - const capaB = { catalogue: { B: true } }; - const kbnServer = getKbnServer([getPluginSpec(() => capaA), getPluginSpec(() => capaB)]); - await capabilitiesMixin(kbnServer, server); - - expect(registerMock).toHaveBeenCalledTimes(2); - expect(registerMock.mock.calls[0][0]()).toEqual(capaA); - expect(registerMock.mock.calls[1][0]()).toEqual(capaB); - }); -}); diff --git a/src/legacy/server/capabilities/capabilities_mixin.ts b/src/legacy/server/capabilities/capabilities_mixin.ts deleted file mode 100644 index 1f8c869f17f66..0000000000000 --- a/src/legacy/server/capabilities/capabilities_mixin.ts +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { Server } from 'hapi'; -import KbnServer from '../kbn_server'; - -export async function capabilitiesMixin(kbnServer: KbnServer, server: Server) { - const registerLegacyCapabilities = async () => { - const capabilitiesList = await Promise.all( - kbnServer.pluginSpecs - .map((spec) => spec.getUiCapabilitiesProvider()) - .filter((provider) => !!provider) - .map((provider) => provider(server)) - ); - - capabilitiesList.forEach((capabilities) => { - kbnServer.newPlatform.setup.core.capabilities.registerProvider(() => capabilities); - }); - }; - - // Some plugin capabilities are derived from data provided by other plugins, - // so we need to wait until after all plugins have been init'd to fetch uiCapabilities. - kbnServer.afterPluginsInit(async () => { - await registerLegacyCapabilities(); - }); -} diff --git a/src/legacy/server/capabilities/index.ts b/src/legacy/server/capabilities/index.ts deleted file mode 100644 index 8c5dea1226f2b..0000000000000 --- a/src/legacy/server/capabilities/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { capabilitiesMixin } from './capabilities_mixin'; diff --git a/src/legacy/server/config/schema.js b/src/legacy/server/config/schema.js index 6cd3f8dc448b0..dd65e45659ffc 100644 --- a/src/legacy/server/config/schema.js +++ b/src/legacy/server/config/schema.js @@ -231,6 +231,15 @@ export default () => locale: Joi.string().default('en'), }).default(), + // temporarily moved here from the (now deleted) kibana legacy plugin + kibana: Joi.object({ + enabled: Joi.boolean().default(true), + index: Joi.string().default('.kibana'), + autocompleteTerminateAfter: Joi.number().integer().min(1).default(100000), + // TODO Also allow units here like in elasticsearch config once this is moved to the new platform + autocompleteTimeout: Joi.number().integer().min(1).default(1000), + }).default(), + savedObjects: Joi.object({ maxImportPayloadBytes: Joi.number().default(10485760), maxImportExportSize: Joi.number().default(10000), diff --git a/src/legacy/server/http/integration_tests/max_payload_size.test.js b/src/legacy/server/http/integration_tests/max_payload_size.test.js index 789a54f681ba6..2d0718dd35606 100644 --- a/src/legacy/server/http/integration_tests/max_payload_size.test.js +++ b/src/legacy/server/http/integration_tests/max_payload_size.test.js @@ -17,7 +17,7 @@ * under the License. */ -import * as kbnTestServer from '../../../../test_utils/kbn_server'; +import * as kbnTestServer from '../../../../core/test_helpers/kbn_server'; let root; beforeAll(async () => { diff --git a/src/legacy/server/i18n/index.ts b/src/legacy/server/i18n/index.ts index 09f7022436049..e895f83fe6901 100644 --- a/src/legacy/server/i18n/index.ts +++ b/src/legacy/server/i18n/index.ts @@ -20,7 +20,6 @@ import { i18n, i18nLoader } from '@kbn/i18n'; import { basename } from 'path'; import { Server } from 'hapi'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths import { fromRoot } from '../../../core/server/utils'; import { getTranslationPaths } from './get_translations_path'; import { I18N_RC } from './constants'; diff --git a/src/legacy/server/kbn_server.d.ts b/src/legacy/server/kbn_server.d.ts index 1a1f43b93f26e..663542618375a 100644 --- a/src/legacy/server/kbn_server.d.ts +++ b/src/legacy/server/kbn_server.d.ts @@ -17,43 +17,30 @@ * under the License. 
*/ -import { ResponseObject, Server } from 'hapi'; -import { UnwrapPromise } from '@kbn/utility-types'; +import { Server } from 'hapi'; import { TelemetryCollectionManagerPluginSetup } from 'src/plugins/telemetry_collection_manager/server'; import { - ConfigService, CoreSetup, CoreStart, - ElasticsearchServiceSetup, EnvironmentMode, LoggerFactory, - SavedObjectsClientContract, - SavedObjectsLegacyService, - SavedObjectsClientProviderOptions, - IUiSettingsClient, PackageInfo, - LegacyRequest, LegacyServiceSetupDeps, - LegacyServiceStartDeps, LegacyServiceDiscoverPlugins, } from '../../core/server'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { LegacyConfig, ILegacyService, ILegacyInternals } from '../../core/server/legacy'; +import { LegacyConfig, ILegacyInternals } from '../../core/server/legacy'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { UiPlugins } from '../../core/server/plugins'; -import { CallClusterWithRequest, ElasticsearchPlugin } from '../core_plugins/elasticsearch'; +import { ElasticsearchPlugin } from '../core_plugins/elasticsearch'; import { UsageCollectionSetup } from '../../plugins/usage_collection/server'; import { HomeServerPluginSetup } from '../../plugins/home/server'; // lot of legacy code was assuming this type only had these two methods export type KibanaConfig = Pick; -export interface UiApp { - getId(): string; -} - // Extend the defaults with the plugins and server methods we need. declare module 'hapi' { interface PluginProperties { @@ -65,31 +52,9 @@ declare module 'hapi' { interface Server { config: () => KibanaConfig; - savedObjects: SavedObjectsLegacyService; - injectUiAppVars: (pluginName: string, getAppVars: () => { [key: string]: any }) => void; - getHiddenUiAppById(appId: string): UiApp; - addScopedTutorialContextFactory: ( - scopedTutorialContextFactory: (...args: any[]) => any - ) => void; - getInjectedUiAppVars: (pluginName: string) => { [key: string]: any }; - getUiNavLinks(): Array<{ _id: string }>; - addMemoizedFactoryToRequest: ( - name: string, - factoryFn: (request: Request) => Record - ) => void; logWithMetadata: (tags: string[], message: string, meta: Record) => void; newPlatform: KbnServer['newPlatform']; } - - interface Request { - getSavedObjectsClient(options?: SavedObjectsClientProviderOptions): SavedObjectsClientContract; - getBasePath(): string; - getUiSettingsService(): IUiSettingsClient; - } - - interface ResponseToolkit { - renderAppWithDefaultConfig(app: UiApp): ResponseObject; - } } type KbnMixinFunc = (kbnServer: KbnServer, server: Server, config: any) => Promise | void; @@ -105,11 +70,9 @@ export interface KibanaCore { __internals: { elasticsearch: LegacyServiceSetupDeps['core']['elasticsearch']; hapiServer: LegacyServiceSetupDeps['core']['http']['server']; - kibanaMigrator: LegacyServiceStartDeps['core']['savedObjects']['migrator']; legacy: ILegacyInternals; rendering: LegacyServiceSetupDeps['core']['rendering']; uiPlugins: UiPlugins; - savedObjectsClientProvider: LegacyServiceStartDeps['core']['savedObjects']['clientProvider']; }; env: { mode: Readonly; @@ -168,6 +131,3 @@ export default class KbnServer { // Re-export commonly used hapi types. export { Server, Request, ResponseToolkit } from 'hapi'; - -// Re-export commonly accessed api types. 
-export { SavedObjectsLegacyService, SavedObjectsClient } from 'src/core/server'; diff --git a/src/legacy/server/kbn_server.js b/src/legacy/server/kbn_server.js index 1084521235ea0..a5eefd140c8fa 100644 --- a/src/legacy/server/kbn_server.js +++ b/src/legacy/server/kbn_server.js @@ -33,9 +33,6 @@ import pidMixin from './pid'; import configCompleteMixin from './config/complete'; import { optimizeMixin } from '../../optimize'; import * as Plugins from './plugins'; -import { savedObjectsMixin } from './saved_objects/saved_objects_mixin'; -import { capabilitiesMixin } from './capabilities'; -import { serverExtensionsMixin } from './server_extensions'; import { uiMixin } from '../ui'; import { i18nMixin } from './i18n'; @@ -92,8 +89,6 @@ export default class KbnServer { coreMixin, - // adds methods for extending this.server - serverExtensionsMixin, loggingMixin, warningsMixin, statusMixin, @@ -112,12 +107,6 @@ export default class KbnServer { uiMixin, - // setup saved object routes - savedObjectsMixin, - - // setup capabilities routes - capabilitiesMixin, - // setup routes that serve the @kbn/optimizer output optimizeMixin, diff --git a/src/legacy/server/logging/log_format.js b/src/legacy/server/logging/log_format.js index 8a80cbef1a9c5..6edda8c4be907 100644 --- a/src/legacy/server/logging/log_format.js +++ b/src/legacy/server/logging/log_format.js @@ -91,7 +91,7 @@ export default class TransformObjStream extends Stream.Transform { method: event.method || '', headers: event.headers, remoteAddress: source.remoteAddress, - userAgent: source.remoteAddress, + userAgent: source.userAgent, referer: source.referer, }; diff --git a/src/legacy/server/logging/log_format_json.test.js b/src/legacy/server/logging/log_format_json.test.js index 31e622ecae611..ec7296d21672b 100644 --- a/src/legacy/server/logging/log_format_json.test.js +++ b/src/legacy/server/logging/log_format_json.test.js @@ -21,7 +21,7 @@ import moment from 'moment'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { attachMetaData } from '../../../../src/core/server/legacy/logging/legacy_logging_server'; -import { createListStream, createPromiseFromStreams } from '../../utils'; +import { createListStream, createPromiseFromStreams } from '../../../core/server/utils'; import KbnLoggerJsonFormat from './log_format_json'; @@ -65,12 +65,14 @@ describe('KbnLoggerJsonFormat', () => { }, }; const result = await createPromiseFromStreams([createListStream([event]), format]); - const { type, method, statusCode, message } = JSON.parse(result); + const { type, method, statusCode, message, req } = JSON.parse(result); expect(type).toBe('response'); expect(method).toBe('GET'); expect(statusCode).toBe(200); expect(message).toBe('GET /path/to/resource 200 12000ms - 13.0B'); + expect(req.remoteAddress).toBe('127.0.0.1'); + expect(req.userAgent).toBe('Test Thing'); }); it('ops', async () => { diff --git a/src/legacy/server/logging/log_format_string.test.js b/src/legacy/server/logging/log_format_string.test.js index 067ad70380961..842325865cce2 100644 --- a/src/legacy/server/logging/log_format_string.test.js +++ b/src/legacy/server/logging/log_format_string.test.js @@ -21,7 +21,7 @@ import moment from 'moment'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { attachMetaData } from '../../../../src/core/server/legacy/logging/legacy_logging_server'; -import { createListStream, createPromiseFromStreams } from '../../utils'; +import { createListStream, createPromiseFromStreams } from '../../../core/server/utils'; import 
KbnLoggerStringFormat from './log_format_string'; diff --git a/src/legacy/server/saved_objects/saved_objects_mixin.js b/src/legacy/server/saved_objects/saved_objects_mixin.js deleted file mode 100644 index 185c8807ae8b5..0000000000000 --- a/src/legacy/server/saved_objects/saved_objects_mixin.js +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// Disable lint errors for imports from src/core/server/saved_objects until SavedObjects migration is complete -/* eslint-disable @kbn/eslint/no-restricted-paths */ -import { SavedObjectsSchema } from '../../../core/server/saved_objects/schema'; -import { - SavedObjectsClient, - SavedObjectsRepository, - exportSavedObjectsToStream, - importSavedObjectsFromStream, - resolveSavedObjectsImportErrors, -} from '../../../core/server/saved_objects'; -import { convertTypesToLegacySchema } from '../../../core/server/saved_objects/utils'; - -export function savedObjectsMixin(kbnServer, server) { - const migrator = kbnServer.newPlatform.__internals.kibanaMigrator; - const typeRegistry = kbnServer.newPlatform.start.core.savedObjects.getTypeRegistry(); - const mappings = migrator.getActiveMappings(); - const allTypes = typeRegistry.getAllTypes().map((t) => t.name); - const visibleTypes = typeRegistry.getVisibleTypes().map((t) => t.name); - const schema = new SavedObjectsSchema(convertTypesToLegacySchema(typeRegistry.getAllTypes())); - - server.decorate('server', 'kibanaMigrator', migrator); - - const warn = (message) => server.log(['warning', 'saved-objects'], message); - // we use kibana.index which is technically defined in the kibana plugin, so if - // we don't have the plugin (mainly tests) we can't initialize the saved objects - if (!kbnServer.pluginSpecs.some((p) => p.getId() === 'kibana')) { - warn('Saved Objects uninitialized because the Kibana plugin is disabled.'); - return; - } - - const serializer = kbnServer.newPlatform.start.core.savedObjects.createSerializer(); - - const createRepository = (callCluster, includedHiddenTypes = []) => { - if (typeof callCluster !== 'function') { - throw new TypeError('Repository requires a "callCluster" function to be provided.'); - } - // throw an exception if an extraType is not defined. 
- includedHiddenTypes.forEach((type) => { - if (!allTypes.includes(type)) { - throw new Error(`Missing mappings for saved objects type '${type}'`); - } - }); - const combinedTypes = visibleTypes.concat(includedHiddenTypes); - const allowedTypes = [...new Set(combinedTypes)]; - - const config = server.config(); - - return new SavedObjectsRepository({ - index: config.get('kibana.index'), - migrator, - mappings, - typeRegistry, - serializer, - allowedTypes, - callCluster, - }); - }; - - const provider = kbnServer.newPlatform.__internals.savedObjectsClientProvider; - - const service = { - types: visibleTypes, - SavedObjectsClient, - SavedObjectsRepository, - getSavedObjectsRepository: createRepository, - getScopedSavedObjectsClient: (...args) => provider.getClient(...args), - setScopedSavedObjectsClientFactory: (...args) => provider.setClientFactory(...args), - addScopedSavedObjectsClientWrapperFactory: (...args) => - provider.addClientWrapperFactory(...args), - importExport: { - objectLimit: server.config().get('savedObjects.maxImportExportSize'), - importSavedObjects: importSavedObjectsFromStream, - resolveImportErrors: resolveSavedObjectsImportErrors, - getSortedObjectsForExport: exportSavedObjectsToStream, - }, - schema, - }; - server.decorate('server', 'savedObjects', service); - - const savedObjectsClientCache = new WeakMap(); - server.decorate('request', 'getSavedObjectsClient', function (options) { - const request = this; - - if (savedObjectsClientCache.has(request)) { - return savedObjectsClientCache.get(request); - } - - const savedObjectsClient = server.savedObjects.getScopedSavedObjectsClient(request, options); - - savedObjectsClientCache.set(request, savedObjectsClient); - return savedObjectsClient; - }); -} diff --git a/src/legacy/server/saved_objects/saved_objects_mixin.test.js b/src/legacy/server/saved_objects/saved_objects_mixin.test.js deleted file mode 100644 index 63e4a632ab5e0..0000000000000 --- a/src/legacy/server/saved_objects/saved_objects_mixin.test.js +++ /dev/null @@ -1,282 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { savedObjectsMixin } from './saved_objects_mixin'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { mockKibanaMigrator } from '../../../core/server/saved_objects/migrations/kibana/kibana_migrator.mock'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { savedObjectsClientProviderMock } from '../../../core/server/saved_objects/service/lib/scoped_client_provider.mock'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { convertLegacyTypes } from '../../../core/server/saved_objects/utils'; -import { SavedObjectTypeRegistry } from '../../../core/server'; -import { coreMock } from '../../../core/server/mocks'; - -const mockConfig = { - get: jest.fn().mockReturnValue('anything'), -}; - -const savedObjectMappings = [ - { - pluginId: 'testtype', - properties: { - testtype: { - properties: { - name: { type: 'keyword' }, - }, - }, - }, - }, - { - pluginId: 'testtype2', - properties: { - doc1: { - properties: { - name: { type: 'keyword' }, - }, - }, - doc2: { - properties: { - name: { type: 'keyword' }, - }, - }, - }, - }, - { - pluginId: 'secretPlugin', - properties: { - hiddentype: { - properties: { - secret: { type: 'keyword' }, - }, - }, - }, - }, -]; - -const savedObjectSchemas = { - hiddentype: { - hidden: true, - }, - doc1: { - indexPattern: 'other-index', - }, -}; - -const savedObjectTypes = convertLegacyTypes( - { - savedObjectMappings, - savedObjectSchemas, - savedObjectMigrations: {}, - }, - mockConfig -); - -const typeRegistry = new SavedObjectTypeRegistry(); -savedObjectTypes.forEach((type) => typeRegistry.registerType(type)); - -const migrator = mockKibanaMigrator.create({ - types: savedObjectTypes, -}); - -describe('Saved Objects Mixin', () => { - let mockKbnServer; - let mockServer; - const mockCallCluster = jest.fn(); - const stubCallCluster = jest.fn(); - const config = { - 'kibana.index': 'kibana.index', - 'savedObjects.maxImportExportSize': 10000, - }; - const stubConfig = jest.fn((key) => { - return config[key]; - }); - - beforeEach(() => { - const clientProvider = savedObjectsClientProviderMock.create(); - mockServer = { - log: jest.fn(), - route: jest.fn(), - decorate: jest.fn(), - config: () => { - return { - get: stubConfig, - }; - }, - plugins: { - elasticsearch: { - getCluster: () => { - return { - callWithRequest: mockCallCluster, - callWithInternalUser: stubCallCluster, - }; - }, - waitUntilReady: jest.fn(), - }, - }, - }; - - const coreStart = coreMock.createStart(); - coreStart.savedObjects.getTypeRegistry.mockReturnValue(typeRegistry); - - mockKbnServer = { - newPlatform: { - __internals: { - kibanaMigrator: migrator, - savedObjectsClientProvider: clientProvider, - }, - setup: { - core: coreMock.createSetup(), - }, - start: { - core: coreStart, - }, - }, - server: mockServer, - ready: () => {}, - pluginSpecs: { - some: () => { - return true; - }, - }, - uiExports: { - savedObjectMappings, - savedObjectSchemas, - }, - }; - }); - - describe('no kibana plugin', () => { - it('should not try to create anything', () => { - mockKbnServer.pluginSpecs.some = () => false; - savedObjectsMixin(mockKbnServer, mockServer); - expect(mockServer.log).toHaveBeenCalledWith(expect.any(Array), expect.any(String)); - expect(mockServer.decorate).toHaveBeenCalledWith( - 'server', - 'kibanaMigrator', - expect.any(Object) - ); - expect(mockServer.decorate).toHaveBeenCalledTimes(1); - expect(mockServer.route).not.toHaveBeenCalled(); - }); - }); - - describe('Saved object service', () => { - let service; - - 
beforeEach(async () => { - await savedObjectsMixin(mockKbnServer, mockServer); - const call = mockServer.decorate.mock.calls.filter( - ([objName, methodName]) => objName === 'server' && methodName === 'savedObjects' - ); - service = call[0][2]; - }); - - it('should return all but hidden types', async () => { - expect(service).toBeDefined(); - expect(service.types).toEqual(['testtype', 'doc1', 'doc2']); - }); - - const mockCallEs = jest.fn(); - describe('repository creation', () => { - it('should not allow a repository with an undefined type', () => { - expect(() => { - service.getSavedObjectsRepository(mockCallEs, ['extraType']); - }).toThrow(new Error("Missing mappings for saved objects type 'extraType'")); - }); - - it('should create a repository without hidden types', () => { - const repository = service.getSavedObjectsRepository(mockCallEs); - expect(repository).toBeDefined(); - expect(repository._allowedTypes).toEqual(['testtype', 'doc1', 'doc2']); - }); - - it('should create a repository with a unique list of allowed types', () => { - const repository = service.getSavedObjectsRepository(mockCallEs, ['doc1', 'doc1', 'doc1']); - expect(repository._allowedTypes).toEqual(['testtype', 'doc1', 'doc2']); - }); - - it('should create a repository with extraTypes minus duplicate', () => { - const repository = service.getSavedObjectsRepository(mockCallEs, [ - 'hiddentype', - 'hiddentype', - ]); - expect(repository._allowedTypes).toEqual(['testtype', 'doc1', 'doc2', 'hiddentype']); - }); - - it('should not allow a repository without a callCluster function', () => { - expect(() => { - service.getSavedObjectsRepository({}); - }).toThrow(new Error('Repository requires a "callCluster" function to be provided.')); - }); - }); - - describe('get client', () => { - it('should have a method to get the client', () => { - expect(service).toHaveProperty('getScopedSavedObjectsClient'); - }); - - it('should have a method to set the client factory', () => { - expect(service).toHaveProperty('setScopedSavedObjectsClientFactory'); - }); - - it('should have a method to add a client wrapper factory', () => { - expect(service).toHaveProperty('addScopedSavedObjectsClientWrapperFactory'); - }); - - it('should allow you to set a scoped saved objects client factory', () => { - expect(() => { - service.setScopedSavedObjectsClientFactory({}); - }).not.toThrowError(); - }); - - it('should allow you to add a scoped saved objects client wrapper factory', () => { - expect(() => { - service.addScopedSavedObjectsClientWrapperFactory({}); - }).not.toThrowError(); - }); - }); - - describe('#getSavedObjectsClient', () => { - let getSavedObjectsClient; - - beforeEach(() => { - savedObjectsMixin(mockKbnServer, mockServer); - const call = mockServer.decorate.mock.calls.filter( - ([objName, methodName]) => objName === 'request' && methodName === 'getSavedObjectsClient' - ); - getSavedObjectsClient = call[0][2]; - }); - - it('should be callable', () => { - mockServer.savedObjects = service; - getSavedObjectsClient = getSavedObjectsClient.bind({}); - expect(() => { - getSavedObjectsClient(); - }).not.toThrowError(); - }); - - it('should use cached request object', () => { - mockServer.savedObjects = service; - getSavedObjectsClient = getSavedObjectsClient.bind({ _test: 'me' }); - const savedObjectsClient = getSavedObjectsClient(); - expect(getSavedObjectsClient()).toEqual(savedObjectsClient); - }); - }); - }); -}); diff --git a/src/legacy/server/server_extensions/add_memoized_factory_to_request.test.js 
b/src/legacy/server/server_extensions/add_memoized_factory_to_request.test.js deleted file mode 100644 index 48bd082468061..0000000000000 --- a/src/legacy/server/server_extensions/add_memoized_factory_to_request.test.js +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; - -import { serverExtensionsMixin } from './server_extensions_mixin'; - -describe('server.addMemoizedFactoryToRequest()', () => { - const setup = () => { - class Request {} - - class Server { - constructor() { - sinon.spy(this, 'decorate'); - } - decorate(type, name, value) { - switch (type) { - case 'request': - return (Request.prototype[name] = value); - case 'server': - return (Server.prototype[name] = value); - default: - throw new Error(`Unexpected decorate type ${type}`); - } - } - } - - const server = new Server(); - serverExtensionsMixin({}, server); - return { server, Request }; - }; - - it('throws when propertyName is not a string', () => { - const { server } = setup(); - expect(() => server.addMemoizedFactoryToRequest()).toThrowError('methodName must be a string'); - expect(() => server.addMemoizedFactoryToRequest(null)).toThrowError( - 'methodName must be a string' - ); - expect(() => server.addMemoizedFactoryToRequest(1)).toThrowError('methodName must be a string'); - expect(() => server.addMemoizedFactoryToRequest(true)).toThrowError( - 'methodName must be a string' - ); - expect(() => server.addMemoizedFactoryToRequest(/abc/)).toThrowError( - 'methodName must be a string' - ); - expect(() => server.addMemoizedFactoryToRequest(['foo'])).toThrowError( - 'methodName must be a string' - ); - expect(() => server.addMemoizedFactoryToRequest([1])).toThrowError( - 'methodName must be a string' - ); - expect(() => server.addMemoizedFactoryToRequest({})).toThrowError( - 'methodName must be a string' - ); - }); - - it('throws when factory is not a function', () => { - const { server } = setup(); - expect(() => server.addMemoizedFactoryToRequest('name')).toThrowError( - 'factory must be a function' - ); - expect(() => server.addMemoizedFactoryToRequest('name', null)).toThrowError( - 'factory must be a function' - ); - expect(() => server.addMemoizedFactoryToRequest('name', 1)).toThrowError( - 'factory must be a function' - ); - expect(() => server.addMemoizedFactoryToRequest('name', true)).toThrowError( - 'factory must be a function' - ); - expect(() => server.addMemoizedFactoryToRequest('name', /abc/)).toThrowError( - 'factory must be a function' - ); - expect(() => server.addMemoizedFactoryToRequest('name', ['foo'])).toThrowError( - 'factory must be a function' - ); - expect(() => server.addMemoizedFactoryToRequest('name', [1])).toThrowError( - 'factory must be a function' - ); - expect(() => 
server.addMemoizedFactoryToRequest('name', {})).toThrowError( - 'factory must be a function' - ); - }); - - it('throws when factory takes more than one arg', () => { - const { server } = setup(); - /* eslint-disable no-unused-vars */ - expect(() => server.addMemoizedFactoryToRequest('name', () => {})).not.toThrowError( - 'more than one argument' - ); - expect(() => server.addMemoizedFactoryToRequest('name', (a) => {})).not.toThrowError( - 'more than one argument' - ); - expect(() => server.addMemoizedFactoryToRequest('name', (a, b) => {})).toThrowError( - 'more than one argument' - ); - expect(() => server.addMemoizedFactoryToRequest('name', (a, b, c) => {})).toThrowError( - 'more than one argument' - ); - expect(() => server.addMemoizedFactoryToRequest('name', (a, b, c, d) => {})).toThrowError( - 'more than one argument' - ); - expect(() => server.addMemoizedFactoryToRequest('name', (a, b, c, d, e) => {})).toThrowError( - 'more than one argument' - ); - /* eslint-enable no-unused-vars */ - }); - - it('decorates request objects with a function at `propertyName`', () => { - const { server, Request } = setup(); - - expect(new Request()).not.toHaveProperty('decorated'); - server.addMemoizedFactoryToRequest('decorated', () => {}); - expect(typeof new Request().decorated).toBe('function'); - }); - - it('caches invocations of the factory to the request instance', () => { - const { server, Request } = setup(); - const factory = sinon.stub().returnsArg(0); - server.addMemoizedFactoryToRequest('foo', factory); - - const request1 = new Request(); - const request2 = new Request(); - - // call `foo()` on both requests a bunch of times, each time - // the return value should be exactly the same - expect(request1.foo()).toBe(request1); - expect(request1.foo()).toBe(request1); - expect(request1.foo()).toBe(request1); - expect(request1.foo()).toBe(request1); - expect(request1.foo()).toBe(request1); - expect(request1.foo()).toBe(request1); - - expect(request2.foo()).toBe(request2); - expect(request2.foo()).toBe(request2); - expect(request2.foo()).toBe(request2); - expect(request2.foo()).toBe(request2); - - // only two different requests, so factory should have only been - // called twice with the two request objects - sinon.assert.calledTwice(factory); - sinon.assert.calledWithExactly(factory, request1); - sinon.assert.calledWithExactly(factory, request2); - }); -}); diff --git a/src/legacy/server/server_extensions/index.js b/src/legacy/server/server_extensions/index.js deleted file mode 100644 index e17bd488897f7..0000000000000 --- a/src/legacy/server/server_extensions/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -export { serverExtensionsMixin } from './server_extensions_mixin'; diff --git a/src/legacy/server/server_extensions/server_extensions_mixin.js b/src/legacy/server/server_extensions/server_extensions_mixin.js deleted file mode 100644 index 19c0b24ae15a1..0000000000000 --- a/src/legacy/server/server_extensions/server_extensions_mixin.js +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export function serverExtensionsMixin(kbnServer, server) { - /** - * Decorate all request objects with a new method, `methodName`, - * that will call the `factory` on first invocation and return - * the result of the first call to subsequent invocations. - * - * @method server.addMemoizedFactoryToRequest - * @param {string} methodName location on the request this - * factory should be added - * @param {Function} factory the factory to add to the request, - * which will be called once per request - * with a single argument, the request. - * @return {undefined} - */ - server.decorate('server', 'addMemoizedFactoryToRequest', (methodName, factory) => { - if (typeof methodName !== 'string') { - throw new TypeError('methodName must be a string'); - } - - if (typeof factory !== 'function') { - throw new TypeError('factory must be a function'); - } - - if (factory.length > 1) { - throw new TypeError(` - factory must not take more than one argument, the request object. - Memoization is done based on the request instance and is cached and reused - regardless of other arguments. If you want to have a per-request cache that - also does some sort of secondary memoization then return an object or function - from the memoized decorator and do secondary memoization there. - `); - } - - const requestCache = new WeakMap(); - server.decorate('request', methodName, function () { - const request = this; - - if (!requestCache.has(request)) { - requestCache.set(request, factory(request)); - } - - return requestCache.get(request); - }); - }); -} diff --git a/src/legacy/ui/public/.eslintrc b/src/legacy/ui/public/.eslintrc deleted file mode 100644 index cc44af915ba25..0000000000000 --- a/src/legacy/ui/public/.eslintrc +++ /dev/null @@ -1,3 +0,0 @@ -rules: - no-console: 2 - 'import/no-default-export': error diff --git a/src/legacy/ui/public/__mocks__/metadata.ts b/src/legacy/ui/public/__mocks__/metadata.ts deleted file mode 100644 index b7f944de27463..0000000000000 --- a/src/legacy/ui/public/__mocks__/metadata.ts +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { metadata as metadataImpl } from '../metadata'; - -export const metadata: typeof metadataImpl = { - branch: 'jest-metadata-mock-branch', - version: '42.23.26', -}; diff --git a/src/legacy/ui/public/__tests__/events.js b/src/legacy/ui/public/__tests__/events.js deleted file mode 100644 index c225c2a8ac1c0..0000000000000 --- a/src/legacy/ui/public/__tests__/events.js +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import _ from 'lodash'; -import sinon from 'sinon'; -import ngMock from 'ng_mock'; -import { EventsProvider } from '../events'; -import expect from '@kbn/expect'; -import '../private'; -import { createDefer } from 'ui/promises'; -import { createLegacyClass } from '../utils/legacy_class'; - -describe('Events', function () { - require('test_utils/no_digest_promises').activateForSuite(); - - let Events; - let Promise; - let eventsInstance; - - beforeEach(ngMock.module('kibana')); - beforeEach( - ngMock.inject(function ($injector, Private) { - Promise = $injector.get('Promise'); - Events = Private(EventsProvider); - eventsInstance = new Events(); - }) - ); - - it('should handle on events', function () { - const obj = new Events(); - const prom = obj.on('test', function (message) { - expect(message).to.equal('Hello World'); - }); - - obj.emit('test', 'Hello World'); - - return prom; - }); - - it('should work with inherited objects', function () { - createLegacyClass(MyEventedObject).inherits(Events); - function MyEventedObject() { - MyEventedObject.Super.call(this); - } - const obj = new MyEventedObject(); - - const prom = obj.on('test', function (message) { - expect(message).to.equal('Hello World'); - }); - - obj.emit('test', 'Hello World'); - - return prom; - }); - - it('should clear events when off is called', function () { - const obj = new Events(); - obj.on('test', _.noop); - expect(obj._listeners).to.have.property('test'); - expect(obj._listeners.test).to.have.length(1); - obj.off(); - expect(obj._listeners).to.not.have.property('test'); - }); - - it('should clear a specific handler when off is called for an event', function () { - const obj = new Events(); - const handler1 = sinon.stub(); - const handler2 = sinon.stub(); - obj.on('test', handler1); - obj.on('test', handler2); - expect(obj._listeners).to.have.property('test'); - obj.off('test', handler1); - - return obj.emit('test', 'Hello World').then(function () { - sinon.assert.calledOnce(handler2); - sinon.assert.notCalled(handler1); - }); - }); - - it('should clear a all handlers when off is called for an event', function () { - const obj = new Events(); - const handler1 = sinon.stub(); - obj.on('test', handler1); - expect(obj._listeners).to.have.property('test'); - obj.off('test'); - expect(obj._listeners).to.not.have.property('test'); - - return obj.emit('test', 'Hello World').then(function () { - sinon.assert.notCalled(handler1); - }); - }); - - it('should handle multiple identical emits in the same tick', function () { - const obj = new Events(); - const handler1 = sinon.stub(); - - obj.on('test', handler1); - const emits = [obj.emit('test', 'one'), obj.emit('test', 'two'), obj.emit('test', 'three')]; - - return Promise.all(emits).then(function () { - expect(handler1.callCount).to.be(emits.length); - expect(handler1.getCall(0).calledWith('one')).to.be(true); - expect(handler1.getCall(1).calledWith('two')).to.be(true); - expect(handler1.getCall(2).calledWith('three')).to.be(true); - }); - }); - - it('should handle emits from the handler', function () { - const obj = new Events(); - const secondEmit = createDefer(Promise); - - const handler1 = sinon.spy(function () { - if (handler1.calledTwice) { - return; - } - obj.emit('test').then(_.bindKey(secondEmit, 'resolve')); - }); - - obj.on('test', handler1); - - return Promise.all([obj.emit('test'), secondEmit.promise]).then(function () { - expect(handler1.callCount).to.be(2); - }); - }); - - it('should only emit to handlers registered before emit is called', function () { - 
const obj = new Events(); - const handler1 = sinon.stub(); - const handler2 = sinon.stub(); - - obj.on('test', handler1); - const emits = [obj.emit('test', 'one'), obj.emit('test', 'two'), obj.emit('test', 'three')]; - - return Promise.all(emits).then(function () { - expect(handler1.callCount).to.be(emits.length); - - obj.on('test', handler2); - - const emits2 = [obj.emit('test', 'four'), obj.emit('test', 'five'), obj.emit('test', 'six')]; - - return Promise.all(emits2).then(function () { - expect(handler1.callCount).to.be(emits.length + emits2.length); - expect(handler2.callCount).to.be(emits2.length); - }); - }); - }); - - it('should pass multiple arguments from the emitter', function () { - const obj = new Events(); - const handler = sinon.stub(); - const payload = ['one', { hello: 'tests' }, null]; - - obj.on('test', handler); - - return obj.emit('test', payload[0], payload[1], payload[2]).then(function () { - expect(handler.callCount).to.be(1); - expect(handler.calledWithExactly(payload[0], payload[1], payload[2])).to.be(true); - }); - }); - - it('should preserve the scope of the handler', function () { - const obj = new Events(); - const expected = 'some value'; - let testValue; - - function handler() { - testValue = this.getVal(); - } - handler.getVal = _.constant(expected); - - obj.on('test', handler); - return obj.emit('test').then(function () { - expect(testValue).to.equal(expected); - }); - }); - - it('should always emit in the same order', function () { - const handler = sinon.stub(); - - const obj = new Events(); - obj.on('block', _.partial(handler, 'block')); - obj.on('last', _.partial(handler, 'last')); - - return Promise.all([ - obj.emit('block'), - obj.emit('block'), - obj.emit('block'), - obj.emit('block'), - obj.emit('block'), - obj.emit('block'), - obj.emit('block'), - obj.emit('block'), - obj.emit('block'), - obj.emit('last'), - ]).then(function () { - expect(handler.callCount).to.be(10); - handler.args.forEach(function (args, i) { - expect(args[0]).to.be(i < 9 ? 'block' : 'last'); - }); - }); - }); - - it('calls emitted handlers asynchronously', (done) => { - const listenerStub = sinon.stub(); - eventsInstance.on('test', listenerStub); - eventsInstance.emit('test'); - sinon.assert.notCalled(listenerStub); - - setTimeout(() => { - sinon.assert.calledOnce(listenerStub); - done(); - }, 100); - }); - - it('calling off after an emit that has not yet triggered the handler, will not call the handler', (done) => { - const listenerStub = sinon.stub(); - eventsInstance.on('test', listenerStub); - eventsInstance.emit('test'); - // It's called asynchronously so it shouldn't be called yet. - sinon.assert.notCalled(listenerStub); - eventsInstance.off('test', listenerStub); - - setTimeout(() => { - sinon.assert.notCalled(listenerStub); - done(); - }, 100); - }); -}); diff --git a/src/legacy/ui/public/__tests__/metadata.js b/src/legacy/ui/public/__tests__/metadata.js deleted file mode 100644 index c5051d70849cd..0000000000000 --- a/src/legacy/ui/public/__tests__/metadata.js +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; -import { metadata } from '../metadata'; -describe('ui/metadata', () => { - it('is immutable', () => { - expect(() => (metadata.foo = 'something')).to.throw; - expect(() => (metadata.version = 'something')).to.throw; - expect(() => (metadata.vars = {})).to.throw; - expect(() => (metadata.vars.kbnIndex = 'something')).to.throw; - }); -}); diff --git a/src/legacy/ui/public/_index.scss b/src/legacy/ui/public/_index.scss deleted file mode 100644 index a441b773d4a4e..0000000000000 --- a/src/legacy/ui/public/_index.scss +++ /dev/null @@ -1,9 +0,0 @@ -// Prefix all styles with "kbn" to avoid conflicts. -// Examples -// kbnChart -// kbnChart__legend -// kbnChart__legend--small -// kbnChart__legend-isLoading - -@import './accessibility/index'; -@import './directives/index'; diff --git a/src/legacy/ui/public/accessibility/__tests__/kbn_accessible_click.js b/src/legacy/ui/public/accessibility/__tests__/kbn_accessible_click.js deleted file mode 100644 index f3b7ab29d8a14..0000000000000 --- a/src/legacy/ui/public/accessibility/__tests__/kbn_accessible_click.js +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import angular from 'angular'; -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import '../kbn_accessible_click'; -import { keys } from '@elastic/eui'; - -describe('kbnAccessibleClick directive', () => { - let $compile; - let $rootScope; - - beforeEach(ngMock.module('kibana')); - - beforeEach( - ngMock.inject(function (_$compile_, _$rootScope_) { - $compile = _$compile_; - $rootScope = _$rootScope_; - }) - ); - - describe('throws an error', () => { - it('when the element is a button', () => { - const html = ``; - expect(() => { - $compile(html)($rootScope); - }).to.throwError(/kbnAccessibleClick doesn't need to be used on a button./); - }); - - it('when the element is a link with an href', () => { - const html = ``; - expect(() => { - $compile(html)($rootScope); - }).to.throwError( - /kbnAccessibleClick doesn't need to be used on a link if it has a href attribute./ - ); - }); - - it(`when the element doesn't have an ng-click`, () => { - const html = `
`; - expect(() => { - $compile(html)($rootScope); - }).to.throwError(/kbnAccessibleClick requires ng-click to be defined on its element./); - }); - }); - - describe(`doesn't throw an error`, () => { - it('when the element is a link without an href', () => { - const html = ``; - expect(() => { - $compile(html)($rootScope); - }).not.to.throwError(); - }); - }); - - describe('adds accessibility attributes', () => { - it('tabindex', () => { - const html = `
`; - const element = $compile(html)($rootScope); - expect(element.attr('tabindex')).to.be('0'); - }); - - it('role', () => { - const html = `
`; - const element = $compile(html)($rootScope); - expect(element.attr('role')).to.be('button'); - }); - }); - - describe(`doesn't override pre-existing accessibility attributes`, () => { - it('tabindex', () => { - const html = `
`; - const element = $compile(html)($rootScope); - expect(element.attr('tabindex')).to.be('1'); - }); - - it('role', () => { - const html = `
`; - const element = $compile(html)($rootScope); - expect(element.attr('role')).to.be('submit'); - }); - }); - - describe(`calls ng-click`, () => { - let scope; - let element; - - beforeEach(function () { - scope = $rootScope.$new(); - scope.handleClick = sinon.stub(); - const html = `
`; - element = $compile(html)(scope); - }); - - it(`on ENTER keyup`, () => { - const e = angular.element.Event('keyup'); // eslint-disable-line new-cap - e.key = keys.ENTER; - element.trigger(e); - sinon.assert.calledOnce(scope.handleClick); - }); - - it(`on SPACE keyup`, () => { - const e = angular.element.Event('keyup'); // eslint-disable-line new-cap - e.key = keys.SPACE; - element.trigger(e); - sinon.assert.calledOnce(scope.handleClick); - }); - }); -}); diff --git a/src/legacy/ui/public/accessibility/__tests__/kbn_ui_ace_keyboard_mode.js b/src/legacy/ui/public/accessibility/__tests__/kbn_ui_ace_keyboard_mode.js deleted file mode 100644 index ce1bf95bf0fb7..0000000000000 --- a/src/legacy/ui/public/accessibility/__tests__/kbn_ui_ace_keyboard_mode.js +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import angular from 'angular'; -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import '../kbn_ui_ace_keyboard_mode'; -import { keys } from '@elastic/eui'; - -describe('kbnUiAceKeyboardMode directive', () => { - let element; - - beforeEach(ngMock.module('kibana')); - - beforeEach( - ngMock.inject(($compile, $rootScope) => { - element = $compile(`
`)($rootScope.$new()); - }) - ); - - it('should add the hint element', () => { - expect(element.find('.kbnUiAceKeyboardHint').length).to.be(1); - }); - - describe('hint element', () => { - it('should be tabable', () => { - expect(element.find('.kbnUiAceKeyboardHint').attr('tabindex')).to.be('0'); - }); - - it('should move focus to textbox and be inactive if pressed enter on it', () => { - const textarea = element.find('textarea'); - sinon.spy(textarea[0], 'focus'); - const ev = angular.element.Event('keydown'); // eslint-disable-line new-cap - ev.key = keys.ENTER; - element.find('.kbnUiAceKeyboardHint').trigger(ev); - expect(textarea[0].focus.called).to.be(true); - expect( - element.find('.kbnUiAceKeyboardHint').hasClass('kbnUiAceKeyboardHint-isInactive') - ).to.be(true); - }); - - it('should be shown again, when pressing Escape in ace editor', () => { - const textarea = element.find('textarea'); - const hint = element.find('.kbnUiAceKeyboardHint'); - sinon.spy(hint[0], 'focus'); - const ev = angular.element.Event('keydown'); // eslint-disable-line new-cap - ev.key = keys.ESCAPE; - textarea.trigger(ev); - expect(hint[0].focus.called).to.be(true); - expect(hint.hasClass('kbnUiAceKeyboardHint-isInactive')).to.be(false); - }); - }); - - describe('ui-ace textarea', () => { - it('should not be tabable anymore', () => { - expect(element.find('textarea').attr('tabindex')).to.be('-1'); - }); - }); -}); - -describe('kbnUiAceKeyboardModeService', () => { - let element; - - beforeEach(ngMock.module('kibana')); - - beforeEach( - ngMock.inject(($compile, $rootScope, kbnUiAceKeyboardModeService) => { - const scope = $rootScope.$new(); - element = $compile(`
`)(scope); - kbnUiAceKeyboardModeService.initialize(scope, element); - }) - ); - - it('should add the hint element', () => { - expect(element.find('.kbnUiAceKeyboardHint').length).to.be(1); - }); - - describe('hint element', () => { - it('should be tabable', () => { - expect(element.find('.kbnUiAceKeyboardHint').attr('tabindex')).to.be('0'); - }); - - it('should move focus to textbox and be inactive if pressed enter on it', () => { - const textarea = element.find('textarea'); - sinon.spy(textarea[0], 'focus'); - const ev = angular.element.Event('keydown'); // eslint-disable-line new-cap - ev.key = keys.ENTER; - element.find('.kbnUiAceKeyboardHint').trigger(ev); - expect(textarea[0].focus.called).to.be(true); - expect( - element.find('.kbnUiAceKeyboardHint').hasClass('kbnUiAceKeyboardHint-isInactive') - ).to.be(true); - }); - - it('should be shown again, when pressing Escape in ace editor', () => { - const textarea = element.find('textarea'); - const hint = element.find('.kbnUiAceKeyboardHint'); - sinon.spy(hint[0], 'focus'); - const ev = angular.element.Event('keydown'); // eslint-disable-line new-cap - ev.key = keys.ESCAPE; - textarea.trigger(ev); - expect(hint[0].focus.called).to.be(true); - expect(hint.hasClass('kbnUiAceKeyboardHint-isInactive')).to.be(false); - }); - }); - - describe('ui-ace textarea', () => { - it('should not be tabable anymore', () => { - expect(element.find('textarea').attr('tabindex')).to.be('-1'); - }); - }); -}); diff --git a/src/legacy/ui/public/accessibility/__tests__/scrollto_activedescendant.js b/src/legacy/ui/public/accessibility/__tests__/scrollto_activedescendant.js deleted file mode 100644 index d5ccbf887e79b..0000000000000 --- a/src/legacy/ui/public/accessibility/__tests__/scrollto_activedescendant.js +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import '../scrollto_activedescendant'; - -describe('scrolltoActivedescendant directive', () => { - let $compile; - let $rootScope; - - beforeEach(ngMock.module('kibana')); - - beforeEach( - ngMock.inject((_$compile_, _$rootScope_) => { - $compile = _$compile_; - $rootScope = _$rootScope_; - }) - ); - - it('should call scrollIntoView on aria-activedescendant changes', () => { - const scope = $rootScope.$new(); - scope.ad = ''; - const element = $compile(`
- - -
`)(scope); - const child1 = element.find('#child1'); - const child2 = element.find('#child2'); - sinon.spy(child1[0], 'scrollIntoView'); - sinon.spy(child2[0], 'scrollIntoView'); - scope.ad = 'child1'; - scope.$digest(); - expect(child1[0].scrollIntoView.calledOnce).to.be.eql(true); - scope.ad = 'child2'; - scope.$digest(); - expect(child2[0].scrollIntoView.calledOnce).to.be.eql(true); - }); -}); diff --git a/src/legacy/ui/public/accessibility/_index.scss b/src/legacy/ui/public/accessibility/_index.scss deleted file mode 100644 index 95062449e4b13..0000000000000 --- a/src/legacy/ui/public/accessibility/_index.scss +++ /dev/null @@ -1 +0,0 @@ -@import './kbn_ui_ace_keyboard_mode'; diff --git a/src/legacy/ui/public/accessibility/_kbn_ui_ace_keyboard_mode.scss b/src/legacy/ui/public/accessibility/_kbn_ui_ace_keyboard_mode.scss deleted file mode 100644 index 9ace600db4197..0000000000000 --- a/src/legacy/ui/public/accessibility/_kbn_ui_ace_keyboard_mode.scss +++ /dev/null @@ -1,24 +0,0 @@ -.kbnUiAceKeyboardHint { - position: absolute; - top: 0; - bottom: 0; - right: 0; - left: 0; - background: transparentize($euiColorEmptyShade, 0.3); - display: flex; - flex-direction: column; - justify-content: center; - align-items: center; - text-align: center; - opacity: 0; - - &:focus { - opacity: 1; - border: 2px solid $euiColorPrimary; - z-index: 1000; - } - - &.kbnUiAceKeyboardHint-isInactive { - display: none; - } -} diff --git a/src/legacy/ui/public/accessibility/angular_aria.js b/src/legacy/ui/public/accessibility/angular_aria.js deleted file mode 100644 index 4335eddf0d8cd..0000000000000 --- a/src/legacy/ui/public/accessibility/angular_aria.js +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import 'angular-aria'; -import { uiModules } from '../modules'; - -/** - * This module will take care of attaching appropriate aria tags related to some angular stuff, - * e.g. it will attach aria-invalid if the model state is set to invalid. - * - * You can find more infos in the official documentation: https://docs.angularjs.org/api/ngAria. - * - * Three settings are disabled: it won't automatically attach `tabindex`, `role=button` or - * handling keyboard events for `ngClick` directives. Kibana uses `kbnAccessibleClick` to handle - * those cases where you need an `ngClick` non button element to have keyboard access. 
- */ -uiModules.get('kibana', ['ngAria']).config(($ariaProvider) => { - $ariaProvider.config({ - bindKeydown: false, - bindRoleForClick: false, - tabindex: false, - }); -}); diff --git a/src/legacy/ui/public/accessibility/index.js b/src/legacy/ui/public/accessibility/index.js deleted file mode 100644 index 5ff2521421866..0000000000000 --- a/src/legacy/ui/public/accessibility/index.js +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import './angular_aria'; -import './kbn_accessible_click'; -import './scrollto_activedescendant'; diff --git a/src/legacy/ui/public/accessibility/kbn_accessible_click.js b/src/legacy/ui/public/accessibility/kbn_accessible_click.js deleted file mode 100644 index a57fbc6be82fd..0000000000000 --- a/src/legacy/ui/public/accessibility/kbn_accessible_click.js +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * Interactive elements must be able to receive focus. - * - * Ideally, this means using elements that are natively keyboard accessible (, - * , or - - -`; diff --git a/src/legacy/ui/public/exit_full_screen/_index.scss b/src/legacy/ui/public/exit_full_screen/_index.scss deleted file mode 100644 index 33dff05e2a687..0000000000000 --- a/src/legacy/ui/public/exit_full_screen/_index.scss +++ /dev/null @@ -1 +0,0 @@ -@import '../../../../plugins/kibana_react/public/exit_full_screen_button/index'; diff --git a/src/legacy/ui/public/exit_full_screen/exit_full_screen_button.test.js b/src/legacy/ui/public/exit_full_screen/exit_full_screen_button.test.js deleted file mode 100644 index d4273c0fdb207..0000000000000 --- a/src/legacy/ui/public/exit_full_screen/exit_full_screen_button.test.js +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -jest.mock( - 'ui/chrome', - () => ({ - getKibanaVersion: () => '6.0.0', - setVisible: () => {}, - }), - { virtual: true } -); - -import React from 'react'; -import { mountWithIntl, renderWithIntl } from 'test_utils/enzyme_helpers'; -import sinon from 'sinon'; -import chrome from 'ui/chrome'; - -import { ExitFullScreenButton } from './exit_full_screen_button'; - -import { keys } from '@elastic/eui'; - -test('is rendered', () => { - const component = renderWithIntl( {}} />); - - expect(component).toMatchSnapshot(); -}); - -describe('onExitFullScreenMode', () => { - test('is called when the button is pressed', () => { - const onExitHandler = sinon.stub(); - - const component = mountWithIntl(); - - component.find('button').simulate('click'); - - sinon.assert.calledOnce(onExitHandler); - }); - - test('is called when the ESC key is pressed', () => { - const onExitHandler = sinon.stub(); - - mountWithIntl(); - - const escapeKeyEvent = new KeyboardEvent('keydown', { key: keys.ESCAPE }); - document.dispatchEvent(escapeKeyEvent); - - sinon.assert.calledOnce(onExitHandler); - }); -}); - -describe('chrome.setVisible', () => { - test('is called with false when the component is rendered', () => { - chrome.setVisible = sinon.stub(); - - const component = mountWithIntl( {}} />); - - component.find('button').simulate('click'); - - sinon.assert.calledOnce(chrome.setVisible); - sinon.assert.calledWith(chrome.setVisible, false); - }); - - test('is called with true the component is unmounted', () => { - const component = mountWithIntl( {}} />); - - chrome.setVisible = sinon.stub(); - component.unmount(); - - sinon.assert.calledOnce(chrome.setVisible); - sinon.assert.calledWith(chrome.setVisible, true); - }); -}); diff --git a/src/legacy/ui/public/exit_full_screen/exit_full_screen_button.tsx b/src/legacy/ui/public/exit_full_screen/exit_full_screen_button.tsx deleted file mode 100644 index db4101010f6d6..0000000000000 --- a/src/legacy/ui/public/exit_full_screen/exit_full_screen_button.tsx +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import React, { PureComponent } from 'react'; -import chrome from 'ui/chrome'; -import { ExitFullScreenButton as ExitFullScreenButtonUi } from '../../../../plugins/kibana_react/public'; - -/** - * DO NOT USE THIS COMPONENT, IT IS DEPRECATED. - * Use the one in `src/plugins/kibana_react`. - */ - -interface Props { - onExitFullScreenMode: () => void; -} - -export class ExitFullScreenButton extends PureComponent { - public UNSAFE_componentWillMount() { - chrome.setVisible(false); - } - - public componentWillUnmount() { - chrome.setVisible(true); - } - - public render() { - return ; - } -} diff --git a/src/legacy/ui/public/exit_full_screen/index.ts b/src/legacy/ui/public/exit_full_screen/index.ts deleted file mode 100644 index a965fd776e0c2..0000000000000 --- a/src/legacy/ui/public/exit_full_screen/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { ExitFullScreenButton } from './exit_full_screen_button'; diff --git a/src/legacy/ui/public/flot-charts/API.md b/src/legacy/ui/public/flot-charts/API.md deleted file mode 100644 index 699e2500f4942..0000000000000 --- a/src/legacy/ui/public/flot-charts/API.md +++ /dev/null @@ -1,1498 +0,0 @@ -# Flot Reference # - -**Table of Contents** - -[Introduction](#introduction) -| [Data Format](#data-format) -| [Plot Options](#plot-options) -| [Customizing the legend](#customizing-the-legend) -| [Customizing the axes](#customizing-the-axes) -| [Multiple axes](#multiple-axes) -| [Time series data](#time-series-data) -| [Customizing the data series](#customizing-the-data-series) -| [Customizing the grid](#customizing-the-grid) -| [Specifying gradients](#specifying-gradients) -| [Plot Methods](#plot-methods) -| [Hooks](#hooks) -| [Plugins](#plugins) -| [Version number](#version-number) - ---- - -## Introduction ## - -Consider a call to the plot function: - -```js -var plot = $.plot(placeholder, data, options) -``` - -The placeholder is a jQuery object or DOM element or jQuery expression -that the plot will be put into. This placeholder needs to have its -width and height set as explained in the [README](README.md) (go read that now if -you haven't, it's short). The plot will modify some properties of the -placeholder so it's recommended you simply pass in a div that you -don't use for anything else. Make sure you check any fancy styling -you apply to the div, e.g. background images have been reported to be a -problem on IE 7. - -The plot function can also be used as a jQuery chainable property. 
This form -naturally can't return the plot object directly, but you can still access it -via the 'plot' data key, like this: - -```js -var plot = $("#placeholder").plot(data, options).data("plot"); -``` - -The format of the data is documented below, as is the available -options. The plot object returned from the call has some methods you -can call. These are documented separately below. - -Note that in general Flot gives no guarantees if you change any of the -objects you pass in to the plot function or get out of it since -they're not necessarily deep-copied. - - -## Data Format ## - -The data is an array of data series: - -```js -[ series1, series2, ... ] -``` - -A series can either be raw data or an object with properties. The raw -data format is an array of points: - -```js -[ [x1, y1], [x2, y2], ... ] -``` - -E.g. - -```js -[ [1, 3], [2, 14.01], [3.5, 3.14] ] -``` - -Note that to simplify the internal logic in Flot both the x and y -values must be numbers (even if specifying time series, see below for -how to do this). This is a common problem because you might retrieve -data from the database and serialize them directly to JSON without -noticing the wrong type. If you're getting mysterious errors, double -check that you're inputting numbers and not strings. - -If a null is specified as a point or if one of the coordinates is null -or couldn't be converted to a number, the point is ignored when -drawing. As a special case, a null value for lines is interpreted as a -line segment end, i.e. the points before and after the null value are -not connected. - -Lines and points take two coordinates. For filled lines and bars, you -can specify a third coordinate which is the bottom of the filled -area/bar (defaults to 0). - -The format of a single series object is as follows: - -```js -{ - color: color or number - data: rawdata - label: string - lines: specific lines options - bars: specific bars options - points: specific points options - xaxis: number - yaxis: number - clickable: boolean - hoverable: boolean - shadowSize: number - highlightColor: color or number -} -``` - -You don't have to specify any of them except the data, the rest are -options that will get default values. Typically you'd only specify -label and data, like this: - -```js -{ - label: "y = 3", - data: [[0, 3], [10, 3]] -} -``` - -The label is used for the legend, if you don't specify one, the series -will not show up in the legend. - -If you don't specify color, the series will get a color from the -auto-generated colors. The color is either a CSS color specification -(like "rgb(255, 100, 123)") or an integer that specifies which of -auto-generated colors to select, e.g. 0 will get color no. 0, etc. - -The latter is mostly useful if you let the user add and remove series, -in which case you can hard-code the color index to prevent the colors -from jumping around between the series. - -The "xaxis" and "yaxis" options specify which axis to use. The axes -are numbered from 1 (default), so { yaxis: 2} means that the series -should be plotted against the second y axis. - -"clickable" and "hoverable" can be set to false to disable -interactivity for specific series if interactivity is turned on in -the plot, see below. - -The rest of the options are all documented below as they are the same -as the default options passed in via the options parameter in the plot -command. When you specify them for a specific data series, they will -override the default options for the plot for that data series. 
- -Here's a complete example of a simple data specification: - -```js -[ { label: "Foo", data: [ [10, 1], [17, -14], [30, 5] ] }, - { label: "Bar", data: [ [11, 13], [19, 11], [30, -7] ] } -] -``` - - -## Plot Options ## - -All options are completely optional. They are documented individually -below, to change them you just specify them in an object, e.g. - -```js -var options = { - series: { - lines: { show: true }, - points: { show: true } - } -}; - -$.plot(placeholder, data, options); -``` - - -## Customizing the legend ## - -```js -legend: { - show: boolean - labelFormatter: null or (fn: string, series object -> string) - labelBoxBorderColor: color - noColumns: number - position: "ne" or "nw" or "se" or "sw" - margin: number of pixels or [x margin, y margin] - backgroundColor: null or color - backgroundOpacity: number between 0 and 1 - container: null or jQuery object/DOM element/jQuery expression - sorted: null/false, true, "ascending", "descending", "reverse", or a comparator -} -``` - -The legend is generated as a table with the data series labels and -small label boxes with the color of the series. If you want to format -the labels in some way, e.g. make them to links, you can pass in a -function for "labelFormatter". Here's an example that makes them -clickable: - -```js -labelFormatter: function(label, series) { - // series is the series object for the label - return '' + label + ''; -} -``` - -To prevent a series from showing up in the legend, simply have the function -return null. - -"noColumns" is the number of columns to divide the legend table into. -"position" specifies the overall placement of the legend within the -plot (top-right, top-left, etc.) and margin the distance to the plot -edge (this can be either a number or an array of two numbers like [x, -y]). "backgroundColor" and "backgroundOpacity" specifies the -background. The default is a partly transparent auto-detected -background. - -If you want the legend to appear somewhere else in the DOM, you can -specify "container" as a jQuery object/expression to put the legend -table into. The "position" and "margin" etc. options will then be -ignored. Note that Flot will overwrite the contents of the container. - -Legend entries appear in the same order as their series by default. If "sorted" -is "reverse" then they appear in the opposite order from their series. To sort -them alphabetically, you can specify true, "ascending" or "descending", where -true and "ascending" are equivalent. - -You can also provide your own comparator function that accepts two -objects with "label" and "color" properties, and returns zero if they -are equal, a positive value if the first is greater than the second, -and a negative value if the first is less than the second. - -```js -sorted: function(a, b) { - // sort alphabetically in ascending order - return a.label == b.label ? 0 : ( - a.label > b.label ? 
1 : -1 - ) -} -``` - - -## Customizing the axes ## - -```js -xaxis, yaxis: { - show: null or true/false - position: "bottom" or "top" or "left" or "right" - mode: null or "time" ("time" requires jquery.flot.time.js plugin) - timezone: null, "browser" or timezone (only makes sense for mode: "time") - - color: null or color spec - tickColor: null or color spec - font: null or font spec object - - min: null or number - max: null or number - autoscaleMargin: null or number - - transform: null or fn: number -> number - inverseTransform: null or fn: number -> number - - ticks: null or number or ticks array or (fn: axis -> ticks array) - tickSize: number or array - minTickSize: number or array - tickFormatter: (fn: number, object -> string) or string - tickDecimals: null or number - - labelWidth: null or number - labelHeight: null or number - reserveSpace: null or true - - tickLength: null or number - - alignTicksWithAxis: null or number -} -``` - -All axes have the same kind of options. The following describes how to -configure one axis, see below for what to do if you've got more than -one x axis or y axis. - -If you don't set the "show" option (i.e. it is null), visibility is -auto-detected, i.e. the axis will show up if there's data associated -with it. You can override this by setting the "show" option to true or -false. - -The "position" option specifies where the axis is placed, bottom or -top for x axes, left or right for y axes. The "mode" option determines -how the data is interpreted, the default of null means as decimal -numbers. Use "time" for time series data; see the time series data -section. The time plugin (jquery.flot.time.js) is required for time -series support. - -The "color" option determines the color of the line and ticks for the axis, and -defaults to the grid color with transparency. For more fine-grained control you -can also set the color of the ticks separately with "tickColor". - -You can customize the font and color used to draw the axis tick labels with CSS -or directly via the "font" option. When "font" is null - the default - each -tick label is given the 'flot-tick-label' class. For compatibility with Flot -0.7 and earlier the labels are also given the 'tickLabel' class, but this is -deprecated and scheduled to be removed with the release of version 1.0.0. - -To enable more granular control over styles, labels are divided between a set -of text containers, with each holding the labels for one axis. These containers -are given the classes 'flot-[x|y]-axis', and 'flot-[x|y]#-axis', where '#' is -the number of the axis when there are multiple axes. For example, the x-axis -labels for a simple plot with only a single x-axis might look like this: - -```html -
-<div class="flot-x-axis flot-x1-axis">
-    <div class="flot-tick-label">January 2013</div>
-    ...
-</div>
-``` - -For direct control over label styles you can also provide "font" as an object -with this format: - -```js -{ - size: 11, - lineHeight: 13, - style: "italic", - weight: "bold", - family: "sans-serif", - variant: "small-caps", - color: "#545454" -} -``` - -The size and lineHeight must be expressed in pixels; CSS units such as 'em' -or 'smaller' are not allowed. - -The options "min"/"max" are the precise minimum/maximum value on the -scale. If you don't specify either of them, a value will automatically -be chosen based on the minimum/maximum data values. Note that Flot -always examines all the data values you feed to it, even if a -restriction on another axis may make some of them invisible (this -makes interactive use more stable). - -The "autoscaleMargin" is a bit esoteric: it's the fraction of margin -that the scaling algorithm will add to avoid that the outermost points -ends up on the grid border. Note that this margin is only applied when -a min or max value is not explicitly set. If a margin is specified, -the plot will furthermore extend the axis end-point to the nearest -whole tick. The default value is "null" for the x axes and 0.02 for y -axes which seems appropriate for most cases. - -"transform" and "inverseTransform" are callbacks you can put in to -change the way the data is drawn. You can design a function to -compress or expand certain parts of the axis non-linearly, e.g. -suppress weekends or compress far away points with a logarithm or some -other means. When Flot draws the plot, each value is first put through -the transform function. Here's an example, the x axis can be turned -into a natural logarithm axis with the following code: - -```js -xaxis: { - transform: function (v) { return Math.log(v); }, - inverseTransform: function (v) { return Math.exp(v); } -} -``` - -Similarly, for reversing the y axis so the values appear in inverse -order: - -```js -yaxis: { - transform: function (v) { return -v; }, - inverseTransform: function (v) { return -v; } -} -``` - -Note that for finding extrema, Flot assumes that the transform -function does not reorder values (it should be monotone). - -The inverseTransform is simply the inverse of the transform function -(so v == inverseTransform(transform(v)) for all relevant v). It is -required for converting from canvas coordinates to data coordinates, -e.g. for a mouse interaction where a certain pixel is clicked. If you -don't use any interactive features of Flot, you may not need it. - - -The rest of the options deal with the ticks. - -If you don't specify any ticks, a tick generator algorithm will make -some for you. The algorithm has two passes. It first estimates how -many ticks would be reasonable and uses this number to compute a nice -round tick interval size. Then it generates the ticks. - -You can specify how many ticks the algorithm aims for by setting -"ticks" to a number. The algorithm always tries to generate reasonably -round tick values so even if you ask for three ticks, you might get -five if that fits better with the rounding. If you don't want any -ticks at all, set "ticks" to 0 or an empty array. - -Another option is to skip the rounding part and directly set the tick -interval size with "tickSize". If you set it to 2, you'll get ticks at -2, 4, 6, etc. Alternatively, you can specify that you just don't want -ticks at a size less than a specific tick size with "minTickSize". -Note that for time series, the format is an array like [2, "month"], -see the next section. 
- -If you want to completely override the tick algorithm, you can specify -an array for "ticks", either like this: - -```js -ticks: [0, 1.2, 2.4] -``` - -Or like this where the labels are also customized: - -```js -ticks: [[0, "zero"], [1.2, "one mark"], [2.4, "two marks"]] -``` - -You can mix the two if you like. - -For extra flexibility you can specify a function as the "ticks" -parameter. The function will be called with an object with the axis -min and max and should return a ticks array. Here's a simplistic tick -generator that spits out intervals of pi, suitable for use on the x -axis for trigonometric functions: - -```js -function piTickGenerator(axis) { - var res = [], i = Math.floor(axis.min / Math.PI); - do { - var v = i * Math.PI; - res.push([v, i + "\u03c0"]); - ++i; - } while (v < axis.max); - return res; -} -``` - -You can control how the ticks look like with "tickDecimals", the -number of decimals to display (default is auto-detected). - -Alternatively, for ultimate control over how ticks are formatted you can -provide a function to "tickFormatter". The function is passed two -parameters, the tick value and an axis object with information, and -should return a string. The default formatter looks like this: - -```js -function formatter(val, axis) { - return val.toFixed(axis.tickDecimals); -} -``` - -The axis object has "min" and "max" with the range of the axis, -"tickDecimals" with the number of decimals to round the value to and -"tickSize" with the size of the interval between ticks as calculated -by the automatic axis scaling algorithm (or specified by you). Here's -an example of a custom formatter: - -```js -function suffixFormatter(val, axis) { - if (val > 1000000) - return (val / 1000000).toFixed(axis.tickDecimals) + " MB"; - else if (val > 1000) - return (val / 1000).toFixed(axis.tickDecimals) + " kB"; - else - return val.toFixed(axis.tickDecimals) + " B"; -} -``` - -"labelWidth" and "labelHeight" specifies a fixed size of the tick -labels in pixels. They're useful in case you need to align several -plots. "reserveSpace" means that even if an axis isn't shown, Flot -should reserve space for it - it is useful in combination with -labelWidth and labelHeight for aligning multi-axis charts. - -"tickLength" is the length of the tick lines in pixels. By default, the -innermost axes will have ticks that extend all across the plot, while -any extra axes use small ticks. A value of null means use the default, -while a number means small ticks of that length - set it to 0 to hide -the lines completely. - -If you set "alignTicksWithAxis" to the number of another axis, e.g. -alignTicksWithAxis: 1, Flot will ensure that the autogenerated ticks -of this axis are aligned with the ticks of the other axis. This may -improve the looks, e.g. if you have one y axis to the left and one to -the right, because the grid lines will then match the ticks in both -ends. The trade-off is that the forced ticks won't necessarily be at -natural places. - - -## Multiple axes ## - -If you need more than one x axis or y axis, you need to specify for -each data series which axis they are to use, as described under the -format of the data series, e.g. { data: [...], yaxis: 2 } specifies -that a series should be plotted against the second y axis. 
- -To actually configure that axis, you can't use the xaxis/yaxis options -directly - instead there are two arrays in the options: - -```js -xaxes: [] -yaxes: [] -``` - -Here's an example of configuring a single x axis and two y axes (we -can leave options of the first y axis empty as the defaults are fine): - -```js -{ - xaxes: [ { position: "top" } ], - yaxes: [ { }, { position: "right", min: 20 } ] -} -``` - -The arrays get their default values from the xaxis/yaxis settings, so -say you want to have all y axes start at zero, you can simply specify -yaxis: { min: 0 } instead of adding a min parameter to all the axes. - -Generally, the various interfaces in Flot dealing with data points -either accept an xaxis/yaxis parameter to specify which axis number to -use (starting from 1), or lets you specify the coordinate directly as -x2/x3/... or x2axis/x3axis/... instead of "x" or "xaxis". - - -## Time series data ## - -Please note that it is now required to include the time plugin, -jquery.flot.time.js, for time series support. - -Time series are a bit more difficult than scalar data because -calendars don't follow a simple base 10 system. For many cases, Flot -abstracts most of this away, but it can still be a bit difficult to -get the data into Flot. So we'll first discuss the data format. - -The time series support in Flot is based on JavaScript timestamps, -i.e. everywhere a time value is expected or handed over, a JavaScript -timestamp number is used. This is a number, not a Date object. A -JavaScript timestamp is the number of milliseconds since January 1, -1970 00:00:00 UTC. This is almost the same as Unix timestamps, except it's -in milliseconds, so remember to multiply by 1000! - -You can see a timestamp like this - -```js -alert((new Date()).getTime()) -``` - -There are different schools of thought when it comes to display of -timestamps. Many will want the timestamps to be displayed according to -a certain time zone, usually the time zone in which the data has been -produced. Some want the localized experience, where the timestamps are -displayed according to the local time of the visitor. Flot supports -both. Optionally you can include a third-party library to get -additional timezone support. - -Default behavior is that Flot always displays timestamps according to -UTC. The reason being that the core JavaScript Date object does not -support other fixed time zones. Often your data is at another time -zone, so it may take a little bit of tweaking to work around this -limitation. - -The easiest way to think about it is to pretend that the data -production time zone is UTC, even if it isn't. So if you have a -datapoint at 2002-02-20 08:00, you can generate a timestamp for eight -o'clock UTC even if it really happened eight o'clock UTC+0200. - -In PHP you can get an appropriate timestamp with: - -```php -strtotime("2002-02-20 UTC") * 1000 -``` - -In Python you can get it with something like: - -```python -calendar.timegm(datetime_object.timetuple()) * 1000 -``` -In Ruby you can get it using the `#to_i` method on the -[`Time`](http://apidock.com/ruby/Time/to_i) object. If you're using the -`active_support` gem (default for Ruby on Rails applications) `#to_i` is also -available on the `DateTime` and `ActiveSupport::TimeWithZone` objects. 
You -simply need to multiply the result by 1000: - -```ruby -Time.now.to_i * 1000 # => 1383582043000 -# ActiveSupport examples: -DateTime.now.to_i * 1000 # => 1383582043000 -ActiveSupport::TimeZone.new('Asia/Shanghai').now.to_i * 1000 -# => 1383582043000 -``` - -In .NET you can get it with something like: - -```aspx -public static int GetJavaScriptTimestamp(System.DateTime input) -{ - System.TimeSpan span = new System.TimeSpan(System.DateTime.Parse("1/1/1970").Ticks); - System.DateTime time = input.Subtract(span); - return (long)(time.Ticks / 10000); -} -``` - -JavaScript also has some support for parsing date strings, so it is -possible to generate the timestamps manually client-side. - -If you've already got the real UTC timestamp, it's too late to use the -pretend trick described above. But you can fix up the timestamps by -adding the time zone offset, e.g. for UTC+0200 you would add 2 hours -to the UTC timestamp you got. Then it'll look right on the plot. Most -programming environments have some means of getting the timezone -offset for a specific date (note that you need to get the offset for -each individual timestamp to account for daylight savings). - -The alternative with core JavaScript is to interpret the timestamps -according to the time zone that the visitor is in, which means that -the ticks will shift with the time zone and daylight savings of each -visitor. This behavior is enabled by setting the axis option -"timezone" to the value "browser". - -If you need more time zone functionality than this, there is still -another option. If you include the "timezone-js" library - in the page and set axis.timezone -to a value recognized by said library, Flot will use timezone-js to -interpret the timestamps according to that time zone. - -Once you've gotten the timestamps into the data and specified "time" -as the axis mode, Flot will automatically generate relevant ticks and -format them. As always, you can tweak the ticks via the "ticks" option -- just remember that the values should be timestamps (numbers), not -Date objects. - -Tick generation and formatting can also be controlled separately -through the following axis options: - -```js -minTickSize: array -timeformat: null or format string -monthNames: null or array of size 12 of strings -dayNames: null or array of size 7 of strings -twelveHourClock: boolean -``` - -Here "timeformat" is a format string to use. You might use it like -this: - -```js -xaxis: { - mode: "time", - timeformat: "%Y/%m/%d" -} -``` - -This will result in tick labels like "2000/12/24". A subset of the -standard strftime specifiers are supported (plus the nonstandard %q): - -```js -%a: weekday name (customizable) -%b: month name (customizable) -%d: day of month, zero-padded (01-31) -%e: day of month, space-padded ( 1-31) -%H: hours, 24-hour time, zero-padded (00-23) -%I: hours, 12-hour time, zero-padded (01-12) -%m: month, zero-padded (01-12) -%M: minutes, zero-padded (00-59) -%q: quarter (1-4) -%S: seconds, zero-padded (00-59) -%y: year (two digits) -%Y: year (four digits) -%p: am/pm -%P: AM/PM (uppercase version of %p) -%w: weekday as number (0-6, 0 being Sunday) -``` - -Flot 0.8 switched from %h to the standard %H hours specifier. The %h specifier -is still available, for backwards-compatibility, but is deprecated and -scheduled to be removed permanently with the release of version 1.0. - -You can customize the month names with the "monthNames" option. 
For -instance, for Danish you might specify: - -```js -monthNames: ["jan", "feb", "mar", "apr", "maj", "jun", "jul", "aug", "sep", "okt", "nov", "dec"] -``` - -Similarly you can customize the weekday names with the "dayNames" -option. An example in French: - -```js -dayNames: ["dim", "lun", "mar", "mer", "jeu", "ven", "sam"] -``` - -If you set "twelveHourClock" to true, the autogenerated timestamps -will use 12 hour AM/PM timestamps instead of 24 hour. This only -applies if you have not set "timeformat". Use the "%I" and "%p" or -"%P" options if you want to build your own format string with 12-hour -times. - -If the Date object has a strftime property (and it is a function), it -will be used instead of the built-in formatter. Thus you can include -a strftime library such as http://hacks.bluesmoon.info/strftime/ for -more powerful date/time formatting. - -If everything else fails, you can control the formatting by specifying -a custom tick formatter function as usual. Here's a simple example -which will format December 24 as 24/12: - -```js -tickFormatter: function (val, axis) { - var d = new Date(val); - return d.getUTCDate() + "/" + (d.getUTCMonth() + 1); -} -``` - -Note that for the time mode "tickSize" and "minTickSize" are a bit -special in that they are arrays on the form "[value, unit]" where unit -is one of "second", "minute", "hour", "day", "month" and "year". So -you can specify - -```js -minTickSize: [1, "month"] -``` - -to get a tick interval size of at least 1 month and correspondingly, -if axis.tickSize is [2, "day"] in the tick formatter, the ticks have -been produced with two days in-between. - - -## Customizing the data series ## - -```js -series: { - lines, points, bars: { - show: boolean - lineWidth: number - fill: boolean or number - fillColor: null or color/gradient - } - - lines, bars: { - zero: boolean - } - - points: { - radius: number - symbol: "circle" or function - } - - bars: { - barWidth: number - align: "left", "right" or "center" - horizontal: boolean - } - - lines: { - steps: boolean - } - - shadowSize: number - highlightColor: color or number -} - -colors: [ color1, color2, ... ] -``` - -The options inside "series: {}" are copied to each of the series. So -you can specify that all series should have bars by putting it in the -global options, or override it for individual series by specifying -bars in a particular the series object in the array of data. - -The most important options are "lines", "points" and "bars" that -specify whether and how lines, points and bars should be shown for -each data series. In case you don't specify anything at all, Flot will -default to showing lines (you can turn this off with -lines: { show: false }). You can specify the various types -independently of each other, and Flot will happily draw each of them -in turn (this is probably only useful for lines and points), e.g. - -```js -var options = { - series: { - lines: { show: true, fill: true, fillColor: "rgba(255, 255, 255, 0.8)" }, - points: { show: true, fill: false } - } -}; -``` - -"lineWidth" is the thickness of the line or outline in pixels. You can -set it to 0 to prevent a line or outline from being drawn; this will -also hide the shadow. - -"fill" is whether the shape should be filled. For lines, this produces -area graphs. You can use "fillColor" to specify the color of the fill. -If "fillColor" evaluates to false (default for everything except -points which are filled with white), the fill color is auto-set to the -color of the data series. 
You can adjust the opacity of the fill by -setting fill to a number between 0 (fully transparent) and 1 (fully -opaque). - -For bars, fillColor can be a gradient, see the gradient documentation -below. "barWidth" is the width of the bars in units of the x axis (or -the y axis if "horizontal" is true), contrary to most other measures -that are specified in pixels. For instance, for time series the unit -is milliseconds so 24 * 60 * 60 * 1000 produces bars with the width of -a day. "align" specifies whether a bar should be left-aligned -(default), right-aligned or centered on top of the value it represents. -When "horizontal" is on, the bars are drawn horizontally, i.e. from the -y axis instead of the x axis; note that the bar end points are still -defined in the same way so you'll probably want to swap the -coordinates if you've been plotting vertical bars first. - -Area and bar charts normally start from zero, regardless of the data's range. -This is because they convey information through size, and starting from a -different value would distort their meaning. In cases where the fill is purely -for decorative purposes, however, "zero" allows you to override this behavior. -It defaults to true for filled lines and bars; setting it to false tells the -series to use the same automatic scaling as an un-filled line. - -For lines, "steps" specifies whether two adjacent data points are -connected with a straight (possibly diagonal) line or with first a -horizontal and then a vertical line. Note that this transforms the -data by adding extra points. - -For points, you can specify the radius and the symbol. The only -built-in symbol type is circles, for other types you can use a plugin -or define them yourself by specifying a callback: - -```js -function cross(ctx, x, y, radius, shadow) { - var size = radius * Math.sqrt(Math.PI) / 2; - ctx.moveTo(x - size, y - size); - ctx.lineTo(x + size, y + size); - ctx.moveTo(x - size, y + size); - ctx.lineTo(x + size, y - size); -} -``` - -The parameters are the drawing context, x and y coordinates of the -center of the point, a radius which corresponds to what the circle -would have used and whether the call is to draw a shadow (due to -limited canvas support, shadows are currently faked through extra -draws). It's good practice to ensure that the area covered by the -symbol is the same as for the circle with the given radius, this -ensures that all symbols have approximately the same visual weight. - -"shadowSize" is the default size of shadows in pixels. Set it to 0 to -remove shadows. - -"highlightColor" is the default color of the translucent overlay used -to highlight the series when the mouse hovers over it. - -The "colors" array specifies a default color theme to get colors for -the data series from. You can specify as many colors as you like, like -this: - -```js -colors: ["#d18b2c", "#dba255", "#919733"] -``` - -If there are more data series than colors, Flot will try to generate -extra colors by lightening and darkening colors in the theme. 
- - -## Customizing the grid ## - -```js -grid: { - show: boolean - aboveData: boolean - color: color - backgroundColor: color/gradient or null - margin: number or margin object - labelMargin: number - axisMargin: number - markings: array of markings or (fn: axes -> array of markings) - borderWidth: number or object with "top", "right", "bottom" and "left" properties with different widths - borderColor: color or null or object with "top", "right", "bottom" and "left" properties with different colors - minBorderMargin: number or null - clickable: boolean - hoverable: boolean - autoHighlight: boolean - mouseActiveRadius: number -} - -interaction: { - redrawOverlayInterval: number or -1 -} -``` - -The grid is the thing with the axes and a number of ticks. Many of the -things in the grid are configured under the individual axes, but not -all. "color" is the color of the grid itself whereas "backgroundColor" -specifies the background color inside the grid area, here null means -that the background is transparent. You can also set a gradient, see -the gradient documentation below. - -You can turn off the whole grid including tick labels by setting -"show" to false. "aboveData" determines whether the grid is drawn -above the data or below (below is default). - -"margin" is the space in pixels between the canvas edge and the grid, -which can be either a number or an object with individual margins for -each side, in the form: - -```js -margin: { - top: top margin in pixels - left: left margin in pixels - bottom: bottom margin in pixels - right: right margin in pixels -} -``` - -"labelMargin" is the space in pixels between tick labels and axis -line, and "axisMargin" is the space in pixels between axes when there -are two next to each other. - -"borderWidth" is the width of the border around the plot. Set it to 0 -to disable the border. Set it to an object with "top", "right", -"bottom" and "left" properties to use different widths. You can -also set "borderColor" if you want the border to have a different color -than the grid lines. Set it to an object with "top", "right", "bottom" -and "left" properties to use different colors. "minBorderMargin" controls -the default minimum margin around the border - it's used to make sure -that points aren't accidentally clipped by the canvas edge so by default -the value is computed from the point radius. - -"markings" is used to draw simple lines and rectangular areas in the -background of the plot. You can either specify an array of ranges on -the form { xaxis: { from, to }, yaxis: { from, to } } (with multiple -axes, you can specify coordinates for other axes instead, e.g. as -x2axis/x3axis/...) or with a function that returns such an array given -the axes for the plot in an object as the first parameter. - -You can set the color of markings by specifying "color" in the ranges -object. Here's an example array: - -```js -markings: [ { xaxis: { from: 0, to: 2 }, yaxis: { from: 10, to: 10 }, color: "#bb0000" }, ... ] -``` - -If you leave out one of the values, that value is assumed to go to the -border of the plot. So for example if you only specify { xaxis: { -from: 0, to: 2 } } it means an area that extends from the top to the -bottom of the plot in the x range 0-2. - -A line is drawn if from and to are the same, e.g. - -```js -markings: [ { yaxis: { from: 1, to: 1 } }, ... ] -``` - -would draw a line parallel to the x axis at y = 1. You can control the -line width with "lineWidth" in the range object. 
- -An example function that makes vertical stripes might look like this: - -```js -markings: function (axes) { - var markings = []; - for (var x = Math.floor(axes.xaxis.min); x < axes.xaxis.max; x += 2) - markings.push({ xaxis: { from: x, to: x + 1 } }); - return markings; -} -``` - -If you set "clickable" to true, the plot will listen for click events -on the plot area and fire a "plotclick" event on the placeholder with -a position and a nearby data item object as parameters. The coordinates -are available both in the unit of the axes (not in pixels) and in -global screen coordinates. - -Likewise, if you set "hoverable" to true, the plot will listen for -mouse move events on the plot area and fire a "plothover" event with -the same parameters as the "plotclick" event. If "autoHighlight" is -true (the default), nearby data items are highlighted automatically. -If needed, you can disable highlighting and control it yourself with -the highlight/unhighlight plot methods described elsewhere. - -You can use "plotclick" and "plothover" events like this: - -```js -$.plot($("#placeholder"), [ d ], { grid: { clickable: true } }); - -$("#placeholder").bind("plotclick", function (event, pos, item) { - alert("You clicked at " + pos.x + ", " + pos.y); - // axis coordinates for other axes, if present, are in pos.x2, pos.x3, ... - // if you need global screen coordinates, they are pos.pageX, pos.pageY - - if (item) { - highlight(item.series, item.datapoint); - alert("You clicked a point!"); - } -}); -``` - -The item object in this example is either null or a nearby object on the form: - -```js -item: { - datapoint: the point, e.g. [0, 2] - dataIndex: the index of the point in the data array - series: the series object - seriesIndex: the index of the series - pageX, pageY: the global screen coordinates of the point -} -``` - -For instance, if you have specified the data like this - -```js -$.plot($("#placeholder"), [ { label: "Foo", data: [[0, 10], [7, 3]] } ], ...); -``` - -and the mouse is near the point (7, 3), "datapoint" is [7, 3], -"dataIndex" will be 1, "series" is a normalized series object with -among other things the "Foo" label in series.label and the color in -series.color, and "seriesIndex" is 0. Note that plugins and options -that transform the data can shift the indexes from what you specified -in the original data array. - -If you use the above events to update some other information and want -to clear out that info in case the mouse goes away, you'll probably -also need to listen to "mouseout" events on the placeholder div. - -"mouseActiveRadius" specifies how far the mouse can be from an item -and still activate it. If there are two or more points within this -radius, Flot chooses the closest item. For bars, the top-most bar -(from the latest specified data series) is chosen. - -If you want to disable interactivity for a specific data series, you -can set "hoverable" and "clickable" to false in the options for that -series, like this: - -```js -{ data: [...], label: "Foo", clickable: false } -``` - -"redrawOverlayInterval" specifies the maximum time to delay a redraw -of interactive things (this works as a rate limiting device). The -default is capped to 60 frames per second. You can set it to -1 to -disable the rate limiting. - - -## Specifying gradients ## - -A gradient is specified like this: - -```js -{ colors: [ color1, color2, ... 
] } -``` - -For instance, you might specify a background on the grid going from -black to gray like this: - -```js -grid: { - backgroundColor: { colors: ["#000", "#999"] } -} -``` - -For the series you can specify the gradient as an object that -specifies the scaling of the brightness and the opacity of the series -color, e.g. - -```js -{ colors: [{ opacity: 0.8 }, { brightness: 0.6, opacity: 0.8 } ] } -``` - -where the first color simply has its alpha scaled, whereas the second -is also darkened. For instance, for bars the following makes the bars -gradually disappear, without outline: - -```js -bars: { - show: true, - lineWidth: 0, - fill: true, - fillColor: { colors: [ { opacity: 0.8 }, { opacity: 0.1 } ] } -} -``` - -Flot currently only supports vertical gradients drawn from top to -bottom because that's what works with IE. - - -## Plot Methods ## - -The Plot object returned from the plot function has some methods you -can call: - - - highlight(series, datapoint) - - Highlight a specific datapoint in the data series. You can either - specify the actual objects, e.g. if you got them from a - "plotclick" event, or you can specify the indices, e.g. - highlight(1, 3) to highlight the fourth point in the second series - (remember, zero-based indexing). - - - unhighlight(series, datapoint) or unhighlight() - - Remove the highlighting of the point, same parameters as - highlight. - - If you call unhighlight with no parameters, e.g. as - plot.unhighlight(), all current highlights are removed. - - - setData(data) - - You can use this to reset the data used. Note that axis scaling, - ticks, legend etc. will not be recomputed (use setupGrid() to do - that). You'll probably want to call draw() afterwards. - - You can use this function to speed up redrawing a small plot if - you know that the axes won't change. Put in the new data with - setData(newdata), call draw(), and you're good to go. Note that - for large datasets, almost all the time is consumed in draw() - plotting the data so in this case don't bother. - - - setupGrid() - - Recalculate and set axis scaling, ticks, legend etc. - - Note that because of the drawing model of the canvas, this - function will immediately redraw (actually reinsert in the DOM) - the labels and the legend, but not the actual tick lines because - they're drawn on the canvas. You need to call draw() to get the - canvas redrawn. - - - draw() - - Redraws the plot canvas. - - - triggerRedrawOverlay() - - Schedules an update of an overlay canvas used for drawing - interactive things like a selection and point highlights. This - is mostly useful for writing plugins. The redraw doesn't happen - immediately, instead a timer is set to catch multiple successive - redraws (e.g. from a mousemove). You can get to the overlay by - setting up a drawOverlay hook. - - - width()/height() - - Gets the width and height of the plotting area inside the grid. - This is smaller than the canvas or placeholder dimensions as some - extra space is needed (e.g. for labels). - - - offset() - - Returns the offset of the plotting area inside the grid relative - to the document, useful for instance for calculating mouse - positions (event.pageX/Y minus this offset is the pixel position - inside the plot). - - - pointOffset({ x: xpos, y: ypos }) - - Returns the calculated offset of the data point at (x, y) in data - space within the placeholder div. If you are working with multiple - axes, you can specify the x and y axis references, e.g. 
- - ```js - o = pointOffset({ x: xpos, y: ypos, xaxis: 2, yaxis: 3 }) - // o.left and o.top now contains the offset within the div - ```` - - - resize() - - Tells Flot to resize the drawing canvas to the size of the - placeholder. You need to run setupGrid() and draw() afterwards as - canvas resizing is a destructive operation. This is used - internally by the resize plugin. - - - shutdown() - - Cleans up any event handlers Flot has currently registered. This - is used internally. - -There are also some members that let you peek inside the internal -workings of Flot which is useful in some cases. Note that if you change -something in the objects returned, you're changing the objects used by -Flot to keep track of its state, so be careful. - - - getData() - - Returns an array of the data series currently used in normalized - form with missing settings filled in according to the global - options. So for instance to find out what color Flot has assigned - to the data series, you could do this: - - ```js - var series = plot.getData(); - for (var i = 0; i < series.length; ++i) - alert(series[i].color); - ``` - - A notable other interesting field besides color is datapoints - which has a field "points" with the normalized data points in a - flat array (the field "pointsize" is the increment in the flat - array to get to the next point so for a dataset consisting only of - (x,y) pairs it would be 2). - - - getAxes() - - Gets an object with the axes. The axes are returned as the - attributes of the object, so for instance getAxes().xaxis is the - x axis. - - Various things are stuffed inside an axis object, e.g. you could - use getAxes().xaxis.ticks to find out what the ticks are for the - xaxis. Two other useful attributes are p2c and c2p, functions for - transforming from data point space to the canvas plot space and - back. Both returns values that are offset with the plot offset. - Check the Flot source code for the complete set of attributes (or - output an axis with console.log() and inspect it). - - With multiple axes, the extra axes are returned as x2axis, x3axis, - etc., e.g. getAxes().y2axis is the second y axis. You can check - y2axis.used to see whether the axis is associated with any data - points and y2axis.show to see if it is currently shown. - - - getPlaceholder() - - Returns placeholder that the plot was put into. This can be useful - for plugins for adding DOM elements or firing events. - - - getCanvas() - - Returns the canvas used for drawing in case you need to hack on it - yourself. You'll probably need to get the plot offset too. - - - getPlotOffset() - - Gets the offset that the grid has within the canvas as an object - with distances from the canvas edges as "left", "right", "top", - "bottom". I.e., if you draw a circle on the canvas with the center - placed at (left, top), its center will be at the top-most, left - corner of the grid. - - - getOptions() - - Gets the options for the plot, normalized, with default values - filled in. You get a reference to actual values used by Flot, so - if you modify the values in here, Flot will use the new values. - If you change something, you probably have to call draw() or - setupGrid() or triggerRedrawOverlay() to see the change. - - -## Hooks ## - -In addition to the public methods, the Plot object also has some hooks -that can be used to modify the plotting process. You can install a -callback function at various points in the process, the function then -gets access to the internal data structures in Flot. 
- -Here's an overview of the phases Flot goes through: - - 1. Plugin initialization, parsing options - - 2. Constructing the canvases used for drawing - - 3. Set data: parsing data specification, calculating colors, - copying raw data points into internal format, - normalizing them, finding max/min for axis auto-scaling - - 4. Grid setup: calculating axis spacing, ticks, inserting tick - labels, the legend - - 5. Draw: drawing the grid, drawing each of the series in turn - - 6. Setting up event handling for interactive features - - 7. Responding to events, if any - - 8. Shutdown: this mostly happens in case a plot is overwritten - -Each hook is simply a function which is put in the appropriate array. -You can add them through the "hooks" option, and they are also available -after the plot is constructed as the "hooks" attribute on the returned -plot object, e.g. - -```js - // define a simple draw hook - function hellohook(plot, canvascontext) { alert("hello!"); }; - - // pass it in, in an array since we might want to specify several - var plot = $.plot(placeholder, data, { hooks: { draw: [hellohook] } }); - - // we can now find it again in plot.hooks.draw[0] unless a plugin - // has added other hooks -``` - -The available hooks are described below. All hook callbacks get the -plot object as first parameter. You can find some examples of defined -hooks in the plugins bundled with Flot. - - - processOptions [phase 1] - - ```function(plot, options)``` - - Called after Flot has parsed and merged options. Useful in the - instance where customizations beyond simple merging of default - values is needed. A plugin might use it to detect that it has been - enabled and then turn on or off other options. - - - - processRawData [phase 3] - - ```function(plot, series, data, datapoints)``` - - Called before Flot copies and normalizes the raw data for the given - series. If the function fills in datapoints.points with normalized - points and sets datapoints.pointsize to the size of the points, - Flot will skip the copying/normalization step for this series. - - In any case, you might be interested in setting datapoints.format, - an array of objects for specifying how a point is normalized and - how it interferes with axis scaling. It accepts the following options: - - ```js - { - x, y: boolean, - number: boolean, - required: boolean, - defaultValue: value, - autoscale: boolean - } - ``` - - "x" and "y" specify whether the value is plotted against the x or y axis, - and is currently used only to calculate axis min-max ranges. The default - format array, for example, looks like this: - - ```js - [ - { x: true, number: true, required: true }, - { y: true, number: true, required: true } - ] - ``` - - This indicates that a point, i.e. [0, 25], consists of two values, with the - first being plotted on the x axis and the second on the y axis. - - If "number" is true, then the value must be numeric, and is set to null if - it cannot be converted to a number. - - "defaultValue" provides a fallback in case the original value is null. This - is for instance handy for bars, where one can omit the third coordinate - (the bottom of the bar), which then defaults to zero. - - If "required" is true, then the value must exist (be non-null) for the - point as a whole to be valid. If no value is provided, then the entire - point is cleared out with nulls, turning it into a gap in the series. 
- - "autoscale" determines whether the value is considered when calculating an - automatic min-max range for the axes that the value is plotted against. - - - processDatapoints [phase 3] - - ```function(plot, series, datapoints)``` - - Called after normalization of the given series but before finding - min/max of the data points. This hook is useful for implementing data - transformations. "datapoints" contains the normalized data points in - a flat array as datapoints.points with the size of a single point - given in datapoints.pointsize. Here's a simple transform that - multiplies all y coordinates by 2: - - ```js - function multiply(plot, series, datapoints) { - var points = datapoints.points, ps = datapoints.pointsize; - for (var i = 0; i < points.length; i += ps) - points[i + 1] *= 2; - } - ``` - - Note that you must leave datapoints in a good condition as Flot - doesn't check it or do any normalization on it afterwards. - - - processOffset [phase 4] - - ```function(plot, offset)``` - - Called after Flot has initialized the plot's offset, but before it - draws any axes or plot elements. This hook is useful for customizing - the margins between the grid and the edge of the canvas. "offset" is - an object with attributes "top", "bottom", "left" and "right", - corresponding to the margins on the four sides of the plot. - - - drawBackground [phase 5] - - ```function(plot, canvascontext)``` - - Called before all other drawing operations. Used to draw backgrounds - or other custom elements before the plot or axes have been drawn. - - - drawSeries [phase 5] - - ```function(plot, canvascontext, series)``` - - Hook for custom drawing of a single series. Called just before the - standard drawing routine has been called in the loop that draws - each series. - - - draw [phase 5] - - ```function(plot, canvascontext)``` - - Hook for drawing on the canvas. Called after the grid is drawn - (unless it's disabled or grid.aboveData is set) and the series have - been plotted (in case any points, lines or bars have been turned - on). For examples of how to draw things, look at the source code. - - - bindEvents [phase 6] - - ```function(plot, eventHolder)``` - - Called after Flot has setup its event handlers. Should set any - necessary event handlers on eventHolder, a jQuery object with the - canvas, e.g. - - ```js - function (plot, eventHolder) { - eventHolder.mousedown(function (e) { - alert("You pressed the mouse at " + e.pageX + " " + e.pageY); - }); - } - ``` - - Interesting events include click, mousemove, mouseup/down. You can - use all jQuery events. Usually, the event handlers will update the - state by drawing something (add a drawOverlay hook and call - triggerRedrawOverlay) or firing an externally visible event for - user code. See the crosshair plugin for an example. - - Currently, eventHolder actually contains both the static canvas - used for the plot itself and the overlay canvas used for - interactive features because some versions of IE get the stacking - order wrong. The hook only gets one event, though (either for the - overlay or for the static canvas). - - Note that custom plot events generated by Flot are not generated on - eventHolder, but on the div placeholder supplied as the first - argument to the plot call. You can get that with - plot.getPlaceholder() - that's probably also the one you should use - if you need to fire a custom event. 
- - - drawOverlay [phase 7] - - ```function (plot, canvascontext)``` - - The drawOverlay hook is used for interactive things that need a - canvas to draw on. The model currently used by Flot works the way - that an extra overlay canvas is positioned on top of the static - canvas. This overlay is cleared and then completely redrawn - whenever something interesting happens. This hook is called when - the overlay canvas is to be redrawn. - - "canvascontext" is the 2D context of the overlay canvas. You can - use this to draw things. You'll most likely need some of the - metrics computed by Flot, e.g. plot.width()/plot.height(). See the - crosshair plugin for an example. - - - shutdown [phase 8] - - ```function (plot, eventHolder)``` - - Run when plot.shutdown() is called, which usually only happens in - case a plot is overwritten by a new plot. If you're writing a - plugin that adds extra DOM elements or event handlers, you should - add a callback to clean up after you. Take a look at the section in - the [PLUGINS](PLUGINS.md) document for more info. - - -## Plugins ## - -Plugins extend the functionality of Flot. To use a plugin, simply -include its JavaScript file after Flot in the HTML page. - -If you're worried about download size/latency, you can concatenate all -the plugins you use, and Flot itself for that matter, into one big file -(make sure you get the order right), then optionally run it through a -JavaScript minifier such as YUI Compressor. - -Here's a brief explanation of how the plugin plumbings work: - -Each plugin registers itself in the global array $.plot.plugins. When -you make a new plot object with $.plot, Flot goes through this array -calling the "init" function of each plugin and merging default options -from the "option" attribute of the plugin. The init function gets a -reference to the plot object created and uses this to register hooks -and add new public methods if needed. - -See the [PLUGINS](PLUGINS.md) document for details on how to write a plugin. As the -above description hints, it's actually pretty easy. - - -## Version number ## - -The version number of Flot is available in ```$.plot.version```. diff --git a/src/legacy/ui/public/flot-charts/index.js b/src/legacy/ui/public/flot-charts/index.js deleted file mode 100644 index f98aca30d2dec..0000000000000 --- a/src/legacy/ui/public/flot-charts/index.js +++ /dev/null @@ -1,42 +0,0 @@ -/* @notice - * - * This product includes code that is based on flot-charts, which was available - * under a "MIT" license. - * - * The MIT License (MIT) - * - * Copyright (c) 2007-2014 IOLA and Ole Laursen - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - * THE SOFTWARE. - */ - -import $ from 'jquery'; -if (window) window.jQuery = $; -require('ui/flot-charts/jquery.flot'); -require('ui/flot-charts/jquery.flot.time'); -require('ui/flot-charts/jquery.flot.canvas'); -require('ui/flot-charts/jquery.flot.symbol'); -require('ui/flot-charts/jquery.flot.crosshair'); -require('ui/flot-charts/jquery.flot.selection'); -require('ui/flot-charts/jquery.flot.pie'); -require('ui/flot-charts/jquery.flot.stack'); -require('ui/flot-charts/jquery.flot.threshold'); -require('ui/flot-charts/jquery.flot.fillbetween'); -require('ui/flot-charts/jquery.flot.log'); -module.exports = $; diff --git a/src/legacy/ui/public/flot-charts/jquery.colorhelpers.js b/src/legacy/ui/public/flot-charts/jquery.colorhelpers.js deleted file mode 100644 index b2f6dc4e433a3..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.colorhelpers.js +++ /dev/null @@ -1,180 +0,0 @@ -/* Plugin for jQuery for working with colors. - * - * Version 1.1. - * - * Inspiration from jQuery color animation plugin by John Resig. - * - * Released under the MIT license by Ole Laursen, October 2009. - * - * Examples: - * - * $.color.parse("#fff").scale('rgb', 0.25).add('a', -0.5).toString() - * var c = $.color.extract($("#mydiv"), 'background-color'); - * console.log(c.r, c.g, c.b, c.a); - * $.color.make(100, 50, 25, 0.4).toString() // returns "rgba(100,50,25,0.4)" - * - * Note that .scale() and .add() return the same modified object - * instead of making a new one. - * - * V. 1.1: Fix error handling so e.g. parsing an empty string does - * produce a color rather than just crashing. - */ - -(function($) { - $.color = {}; - - // construct color object with some convenient chainable helpers - $.color.make = function (r, g, b, a) { - var o = {}; - o.r = r || 0; - o.g = g || 0; - o.b = b || 0; - o.a = a != null ? a : 1; - - o.add = function (c, d) { - for (var i = 0; i < c.length; ++i) - o[c.charAt(i)] += d; - return o.normalize(); - }; - - o.scale = function (c, f) { - for (var i = 0; i < c.length; ++i) - o[c.charAt(i)] *= f; - return o.normalize(); - }; - - o.toString = function () { - if (o.a >= 1.0) { - return "rgb("+[o.r, o.g, o.b].join(",")+")"; - } else { - return "rgba("+[o.r, o.g, o.b, o.a].join(",")+")"; - } - }; - - o.normalize = function () { - function clamp(min, value, max) { - return value < min ? min: (value > max ? 
max: value); - } - - o.r = clamp(0, parseInt(o.r), 255); - o.g = clamp(0, parseInt(o.g), 255); - o.b = clamp(0, parseInt(o.b), 255); - o.a = clamp(0, o.a, 1); - return o; - }; - - o.clone = function () { - return $.color.make(o.r, o.b, o.g, o.a); - }; - - return o.normalize(); - } - - // extract CSS color property from element, going up in the DOM - // if it's "transparent" - $.color.extract = function (elem, css) { - var c; - - do { - c = elem.css(css).toLowerCase(); - // keep going until we find an element that has color, or - // we hit the body or root (have no parent) - if (c != '' && c != 'transparent') - break; - elem = elem.parent(); - } while (elem.length && !$.nodeName(elem.get(0), "body")); - - // catch Safari's way of signalling transparent - if (c == "rgba(0, 0, 0, 0)") - c = "transparent"; - - return $.color.parse(c); - } - - // parse CSS color string (like "rgb(10, 32, 43)" or "#fff"), - // returns color object, if parsing failed, you get black (0, 0, - // 0) out - $.color.parse = function (str) { - var res, m = $.color.make; - - // Look for rgb(num,num,num) - if (res = /rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(str)) - return m(parseInt(res[1], 10), parseInt(res[2], 10), parseInt(res[3], 10)); - - // Look for rgba(num,num,num,num) - if (res = /rgba\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(str)) - return m(parseInt(res[1], 10), parseInt(res[2], 10), parseInt(res[3], 10), parseFloat(res[4])); - - // Look for rgb(num%,num%,num%) - if (res = /rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(str)) - return m(parseFloat(res[1])*2.55, parseFloat(res[2])*2.55, parseFloat(res[3])*2.55); - - // Look for rgba(num%,num%,num%,num) - if (res = /rgba\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(str)) - return m(parseFloat(res[1])*2.55, parseFloat(res[2])*2.55, parseFloat(res[3])*2.55, parseFloat(res[4])); - - // Look for #a0b1c2 - if (res = /#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(str)) - return m(parseInt(res[1], 16), parseInt(res[2], 16), parseInt(res[3], 16)); - - // Look for #fff - if (res = /#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(str)) - return m(parseInt(res[1]+res[1], 16), parseInt(res[2]+res[2], 16), parseInt(res[3]+res[3], 16)); - - // Otherwise, we're most likely dealing with a named color - var name = $.trim(str).toLowerCase(); - if (name == "transparent") - return m(255, 255, 255, 0); - else { - // default to black - res = lookupColors[name] || [0, 0, 0]; - return m(res[0], res[1], res[2]); - } - } - - var lookupColors = { - aqua:[0,255,255], - azure:[240,255,255], - beige:[245,245,220], - black:[0,0,0], - blue:[0,0,255], - brown:[165,42,42], - cyan:[0,255,255], - darkblue:[0,0,139], - darkcyan:[0,139,139], - darkgrey:[169,169,169], - darkgreen:[0,100,0], - darkkhaki:[189,183,107], - darkmagenta:[139,0,139], - darkolivegreen:[85,107,47], - darkorange:[255,140,0], - darkorchid:[153,50,204], - darkred:[139,0,0], - darksalmon:[233,150,122], - darkviolet:[148,0,211], - fuchsia:[255,0,255], - gold:[255,215,0], - green:[0,128,0], - indigo:[75,0,130], - khaki:[240,230,140], - lightblue:[173,216,230], - lightcyan:[224,255,255], - lightgreen:[144,238,144], - lightgrey:[211,211,211], - lightpink:[255,182,193], - lightyellow:[255,255,224], - lime:[0,255,0], - magenta:[255,0,255], - maroon:[128,0,0], - navy:[0,0,128], - olive:[128,128,0], - 
orange:[255,165,0], - pink:[255,192,203], - purple:[128,0,128], - violet:[128,0,128], - red:[255,0,0], - silver:[192,192,192], - white:[255,255,255], - yellow:[255,255,0] - }; -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.canvas.js b/src/legacy/ui/public/flot-charts/jquery.flot.canvas.js deleted file mode 100644 index 29328d5812127..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.canvas.js +++ /dev/null @@ -1,345 +0,0 @@ -/* Flot plugin for drawing all elements of a plot on the canvas. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -Flot normally produces certain elements, like axis labels and the legend, using -HTML elements. This permits greater interactivity and customization, and often -looks better, due to cross-browser canvas text inconsistencies and limitations. - -It can also be desirable to render the plot entirely in canvas, particularly -if the goal is to save it as an image, or if Flot is being used in a context -where the HTML DOM does not exist, as is the case within Node.js. This plugin -switches out Flot's standard drawing operations for canvas-only replacements. - -Currently the plugin supports only axis labels, but it will eventually allow -every element of the plot to be rendered directly to canvas. - -The plugin supports these options: - -{ - canvas: boolean -} - -The "canvas" option controls whether full canvas drawing is enabled, making it -possible to toggle on and off. This is useful when a plot uses HTML text in the -browser, but needs to redraw with canvas text when exporting as an image. - -*/ - -(function($) { - - var options = { - canvas: true - }; - - var render, getTextInfo, addText; - - // Cache the prototype hasOwnProperty for faster access - - var hasOwnProperty = Object.prototype.hasOwnProperty; - - function init(plot, classes) { - - var Canvas = classes.Canvas; - - // We only want to replace the functions once; the second time around - // we would just get our new function back. This whole replacing of - // prototype functions is a disaster, and needs to be changed ASAP. - - if (render == null) { - getTextInfo = Canvas.prototype.getTextInfo, - addText = Canvas.prototype.addText, - render = Canvas.prototype.render; - } - - // Finishes rendering the canvas, including overlaid text - - Canvas.prototype.render = function() { - - if (!plot.getOptions().canvas) { - return render.call(this); - } - - var context = this.context, - cache = this._textCache; - - // For each text layer, render elements marked as active - - context.save(); - context.textBaseline = "middle"; - - for (var layerKey in cache) { - if (hasOwnProperty.call(cache, layerKey)) { - var layerCache = cache[layerKey]; - for (var styleKey in layerCache) { - if (hasOwnProperty.call(layerCache, styleKey)) { - var styleCache = layerCache[styleKey], - updateStyles = true; - for (var key in styleCache) { - if (hasOwnProperty.call(styleCache, key)) { - - var info = styleCache[key], - positions = info.positions, - lines = info.lines; - - // Since every element at this level of the cache have the - // same font and fill styles, we can just change them once - // using the values from the first element. 
- - if (updateStyles) { - context.fillStyle = info.font.color; - context.font = info.font.definition; - updateStyles = false; - } - - for (var i = 0, position; position = positions[i]; i++) { - if (position.active) { - for (var j = 0, line; line = position.lines[j]; j++) { - context.fillText(lines[j].text, line[0], line[1]); - } - } else { - positions.splice(i--, 1); - } - } - - if (positions.length == 0) { - delete styleCache[key]; - } - } - } - } - } - } - } - - context.restore(); - }; - - // Creates (if necessary) and returns a text info object. - // - // When the canvas option is set, the object looks like this: - // - // { - // width: Width of the text's bounding box. - // height: Height of the text's bounding box. - // positions: Array of positions at which this text is drawn. - // lines: [{ - // height: Height of this line. - // widths: Width of this line. - // text: Text on this line. - // }], - // font: { - // definition: Canvas font property string. - // color: Color of the text. - // }, - // } - // - // The positions array contains objects that look like this: - // - // { - // active: Flag indicating whether the text should be visible. - // lines: Array of [x, y] coordinates at which to draw the line. - // x: X coordinate at which to draw the text. - // y: Y coordinate at which to draw the text. - // } - - Canvas.prototype.getTextInfo = function(layer, text, font, angle, width) { - - if (!plot.getOptions().canvas) { - return getTextInfo.call(this, layer, text, font, angle, width); - } - - var textStyle, layerCache, styleCache, info; - - // Cast the value to a string, in case we were given a number - - text = "" + text; - - // If the font is a font-spec object, generate a CSS definition - - if (typeof font === "object") { - textStyle = font.style + " " + font.variant + " " + font.weight + " " + font.size + "px " + font.family; - } else { - textStyle = font; - } - - // Retrieve (or create) the cache for the text's layer and styles - - layerCache = this._textCache[layer]; - - if (layerCache == null) { - layerCache = this._textCache[layer] = {}; - } - - styleCache = layerCache[textStyle]; - - if (styleCache == null) { - styleCache = layerCache[textStyle] = {}; - } - - info = styleCache[text]; - - if (info == null) { - - var context = this.context; - - // If the font was provided as CSS, create a div with those - // classes and examine it to generate a canvas font spec. - - if (typeof font !== "object") { - - var element = $("
<div>&nbsp;</div>
") - .css("position", "absolute") - .addClass(typeof font === "string" ? font : null) - .appendTo(this.getTextLayer(layer)); - - font = { - lineHeight: element.height(), - style: element.css("font-style"), - variant: element.css("font-variant"), - weight: element.css("font-weight"), - family: element.css("font-family"), - color: element.css("color") - }; - - // Setting line-height to 1, without units, sets it equal - // to the font-size, even if the font-size is abstract, - // like 'smaller'. This enables us to read the real size - // via the element's height, working around browsers that - // return the literal 'smaller' value. - - font.size = element.css("line-height", 1).height(); - - element.remove(); - } - - textStyle = font.style + " " + font.variant + " " + font.weight + " " + font.size + "px " + font.family; - - // Create a new info object, initializing the dimensions to - // zero so we can count them up line-by-line. - - info = styleCache[text] = { - width: 0, - height: 0, - positions: [], - lines: [], - font: { - definition: textStyle, - color: font.color - } - }; - - context.save(); - context.font = textStyle; - - // Canvas can't handle multi-line strings; break on various - // newlines, including HTML brs, to build a list of lines. - // Note that we could split directly on regexps, but IE < 9 is - // broken; revisit when we drop IE 7/8 support. - - var lines = (text + "").replace(/
|\r\n|\r/g, "\n").split("\n"); - - for (var i = 0; i < lines.length; ++i) { - - var lineText = lines[i], - measured = context.measureText(lineText); - - info.width = Math.max(measured.width, info.width); - info.height += font.lineHeight; - - info.lines.push({ - text: lineText, - width: measured.width, - height: font.lineHeight - }); - } - - context.restore(); - } - - return info; - }; - - // Adds a text string to the canvas text overlay. - - Canvas.prototype.addText = function(layer, x, y, text, font, angle, width, halign, valign) { - - if (!plot.getOptions().canvas) { - return addText.call(this, layer, x, y, text, font, angle, width, halign, valign); - } - - var info = this.getTextInfo(layer, text, font, angle, width), - positions = info.positions, - lines = info.lines; - - // Text is drawn with baseline 'middle', which we need to account - // for by adding half a line's height to the y position. - - y += info.height / lines.length / 2; - - // Tweak the initial y-position to match vertical alignment - - if (valign == "middle") { - y = Math.round(y - info.height / 2); - } else if (valign == "bottom") { - y = Math.round(y - info.height); - } else { - y = Math.round(y); - } - - // FIXME: LEGACY BROWSER FIX - // AFFECTS: Opera < 12.00 - - // Offset the y coordinate, since Opera is off pretty - // consistently compared to the other browsers. - - if (!!(window.opera && window.opera.version().split(".")[0] < 12)) { - y -= 2; - } - - // Determine whether this text already exists at this position. - // If so, mark it for inclusion in the next render pass. - - for (var i = 0, position; position = positions[i]; i++) { - if (position.x == x && position.y == y) { - position.active = true; - return; - } - } - - // If the text doesn't exist at this position, create a new entry - - position = { - active: true, - lines: [], - x: x, - y: y - }; - - positions.push(position); - - // Fill in the x & y positions of each line, adjusting them - // individually for horizontal alignment. - - for (var i = 0, line; line = lines[i]; i++) { - if (halign == "center") { - position.lines.push([Math.round(x - line.width / 2), y]); - } else if (halign == "right") { - position.lines.push([Math.round(x - line.width), y]); - } else { - position.lines.push([Math.round(x), y]); - } - y += line.height; - } - }; - } - - $.plot.plugins.push({ - init: init, - options: options, - name: "canvas", - version: "1.0" - }); - -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.categories.js b/src/legacy/ui/public/flot-charts/jquery.flot.categories.js deleted file mode 100644 index 2f9b257971499..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.categories.js +++ /dev/null @@ -1,190 +0,0 @@ -/* Flot plugin for plotting textual data or categories. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -Consider a dataset like [["February", 34], ["March", 20], ...]. This plugin -allows you to plot such a dataset directly. - -To enable it, you must specify mode: "categories" on the axis with the textual -labels, e.g. - - $.plot("#placeholder", data, { xaxis: { mode: "categories" } }); - -By default, the labels are ordered as they are met in the data series. 
If you -need a different ordering, you can specify "categories" on the axis options -and list the categories there: - - xaxis: { - mode: "categories", - categories: ["February", "March", "April"] - } - -If you need to customize the distances between the categories, you can specify -"categories" as an object mapping labels to values - - xaxis: { - mode: "categories", - categories: { "February": 1, "March": 3, "April": 4 } - } - -If you don't specify all categories, the remaining categories will be numbered -from the max value plus 1 (with a spacing of 1 between each). - -Internally, the plugin works by transforming the input data through an auto- -generated mapping where the first category becomes 0, the second 1, etc. -Hence, a point like ["February", 34] becomes [0, 34] internally in Flot (this -is visible in hover and click events that return numbers rather than the -category labels). The plugin also overrides the tick generator to spit out the -categories as ticks instead of the values. - -If you need to map a value back to its label, the mapping is always accessible -as "categories" on the axis object, e.g. plot.getAxes().xaxis.categories. - -*/ - -(function ($) { - var options = { - xaxis: { - categories: null - }, - yaxis: { - categories: null - } - }; - - function processRawData(plot, series, data, datapoints) { - // if categories are enabled, we need to disable - // auto-transformation to numbers so the strings are intact - // for later processing - - var xCategories = series.xaxis.options.mode == "categories", - yCategories = series.yaxis.options.mode == "categories"; - - if (!(xCategories || yCategories)) - return; - - var format = datapoints.format; - - if (!format) { - // FIXME: auto-detection should really not be defined here - var s = series; - format = []; - format.push({ x: true, number: true, required: true }); - format.push({ y: true, number: true, required: true }); - - if (s.bars.show || (s.lines.show && s.lines.fill)) { - var autoscale = !!((s.bars.show && s.bars.zero) || (s.lines.show && s.lines.zero)); - format.push({ y: true, number: true, required: false, defaultValue: 0, autoscale: autoscale }); - if (s.bars.horizontal) { - delete format[format.length - 1].y; - format[format.length - 1].x = true; - } - } - - datapoints.format = format; - } - - for (var m = 0; m < format.length; ++m) { - if (format[m].x && xCategories) - format[m].number = false; - - if (format[m].y && yCategories) - format[m].number = false; - } - } - - function getNextIndex(categories) { - var index = -1; - - for (var v in categories) - if (categories[v] > index) - index = categories[v]; - - return index + 1; - } - - function categoriesTickGenerator(axis) { - var res = []; - for (var label in axis.categories) { - var v = axis.categories[label]; - if (v >= axis.min && v <= axis.max) - res.push([v, label]); - } - - res.sort(function (a, b) { return a[0] - b[0]; }); - - return res; - } - - function setupCategoriesForAxis(series, axis, datapoints) { - if (series[axis].options.mode != "categories") - return; - - if (!series[axis].categories) { - // parse options - var c = {}, o = series[axis].options.categories || {}; - if ($.isArray(o)) { - for (var i = 0; i < o.length; ++i) - c[o[i]] = i; - } - else { - for (var v in o) - c[v] = o[v]; - } - - series[axis].categories = c; - } - - // fix ticks - if (!series[axis].options.ticks) - series[axis].options.ticks = categoriesTickGenerator; - - transformPointsOnAxis(datapoints, axis, series[axis].categories); - } - - function transformPointsOnAxis(datapoints, 
axis, categories) { - // go through the points, transforming them - var points = datapoints.points, - ps = datapoints.pointsize, - format = datapoints.format, - formatColumn = axis.charAt(0), - index = getNextIndex(categories); - - for (var i = 0; i < points.length; i += ps) { - if (points[i] == null) - continue; - - for (var m = 0; m < ps; ++m) { - var val = points[i + m]; - - if (val == null || !format[m][formatColumn]) - continue; - - if (!(val in categories)) { - categories[val] = index; - ++index; - } - - points[i + m] = categories[val]; - } - } - } - - function processDatapoints(plot, series, datapoints) { - setupCategoriesForAxis(series, "xaxis", datapoints); - setupCategoriesForAxis(series, "yaxis", datapoints); - } - - function init(plot) { - plot.hooks.processRawData.push(processRawData); - plot.hooks.processDatapoints.push(processDatapoints); - } - - $.plot.plugins.push({ - init: init, - options: options, - name: 'categories', - version: '1.0' - }); -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.crosshair.js b/src/legacy/ui/public/flot-charts/jquery.flot.crosshair.js deleted file mode 100644 index 5111695e3d12c..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.crosshair.js +++ /dev/null @@ -1,176 +0,0 @@ -/* Flot plugin for showing crosshairs when the mouse hovers over the plot. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -The plugin supports these options: - - crosshair: { - mode: null or "x" or "y" or "xy" - color: color - lineWidth: number - } - -Set the mode to one of "x", "y" or "xy". The "x" mode enables a vertical -crosshair that lets you trace the values on the x axis, "y" enables a -horizontal crosshair and "xy" enables them both. "color" is the color of the -crosshair (default is "rgba(170, 0, 0, 0.80)"), "lineWidth" is the width of -the drawn lines (default is 1). - -The plugin also adds four public methods: - - - setCrosshair( pos ) - - Set the position of the crosshair. Note that this is cleared if the user - moves the mouse. "pos" is in coordinates of the plot and should be on the - form { x: xpos, y: ypos } (you can use x2/x3/... if you're using multiple - axes), which is coincidentally the same format as what you get from a - "plothover" event. If "pos" is null, the crosshair is cleared. - - - clearCrosshair() - - Clear the crosshair. - - - lockCrosshair(pos) - - Cause the crosshair to lock to the current location, no longer updating if - the user moves the mouse. Optionally supply a position (passed on to - setCrosshair()) to move it to. - - Example usage: - - var myFlot = $.plot( $("#graph"), ..., { crosshair: { mode: "x" } } }; - $("#graph").bind( "plothover", function ( evt, position, item ) { - if ( item ) { - // Lock the crosshair to the data point being hovered - myFlot.lockCrosshair({ - x: item.datapoint[ 0 ], - y: item.datapoint[ 1 ] - }); - } else { - // Return normal crosshair operation - myFlot.unlockCrosshair(); - } - }); - - - unlockCrosshair() - - Free the crosshair to move again after locking it. 
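As an additional editorial sketch (element ids and data sets are hypothetical, not from the original plugin docs), setCrosshair() can also be driven from outside the plot, for example to mirror the crosshair of one chart onto a second one:

    var master = $.plot( $("#master"), data1, { crosshair: { mode: "x" } } );
    var mirror = $.plot( $("#mirror"), data2, { crosshair: { mode: "x" } } );

    $("#master").bind( "plothover", function ( evt, pos ) {
        // pos already has the { x: ..., y: ... } form that setCrosshair() expects
        mirror.setCrosshair( pos );
    });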
-*/ - -(function ($) { - var options = { - crosshair: { - mode: null, // one of null, "x", "y" or "xy", - color: "rgba(170, 0, 0, 0.80)", - lineWidth: 1 - } - }; - - function init(plot) { - // position of crosshair in pixels - var crosshair = { x: -1, y: -1, locked: false }; - - plot.setCrosshair = function setCrosshair(pos) { - if (!pos) - crosshair.x = -1; - else { - var o = plot.p2c(pos); - crosshair.x = Math.max(0, Math.min(o.left, plot.width())); - crosshair.y = Math.max(0, Math.min(o.top, plot.height())); - } - - plot.triggerRedrawOverlay(); - }; - - plot.clearCrosshair = plot.setCrosshair; // passes null for pos - - plot.lockCrosshair = function lockCrosshair(pos) { - if (pos) - plot.setCrosshair(pos); - crosshair.locked = true; - }; - - plot.unlockCrosshair = function unlockCrosshair() { - crosshair.locked = false; - }; - - function onMouseOut(e) { - if (crosshair.locked) - return; - - if (crosshair.x != -1) { - crosshair.x = -1; - plot.triggerRedrawOverlay(); - } - } - - function onMouseMove(e) { - if (crosshair.locked) - return; - - if (plot.getSelection && plot.getSelection()) { - crosshair.x = -1; // hide the crosshair while selecting - return; - } - - var offset = plot.offset(); - crosshair.x = Math.max(0, Math.min(e.pageX - offset.left, plot.width())); - crosshair.y = Math.max(0, Math.min(e.pageY - offset.top, plot.height())); - plot.triggerRedrawOverlay(); - } - - plot.hooks.bindEvents.push(function (plot, eventHolder) { - if (!plot.getOptions().crosshair.mode) - return; - - eventHolder.mouseout(onMouseOut); - eventHolder.mousemove(onMouseMove); - }); - - plot.hooks.drawOverlay.push(function (plot, ctx) { - var c = plot.getOptions().crosshair; - if (!c.mode) - return; - - var plotOffset = plot.getPlotOffset(); - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - - if (crosshair.x != -1) { - var adj = plot.getOptions().crosshair.lineWidth % 2 ? 0.5 : 0; - - ctx.strokeStyle = c.color; - ctx.lineWidth = c.lineWidth; - ctx.lineJoin = "round"; - - ctx.beginPath(); - if (c.mode.indexOf("x") != -1) { - var drawX = Math.floor(crosshair.x) + adj; - ctx.moveTo(drawX, 0); - ctx.lineTo(drawX, plot.height()); - } - if (c.mode.indexOf("y") != -1) { - var drawY = Math.floor(crosshair.y) + adj; - ctx.moveTo(0, drawY); - ctx.lineTo(plot.width(), drawY); - } - ctx.stroke(); - } - ctx.restore(); - }); - - plot.hooks.shutdown.push(function (plot, eventHolder) { - eventHolder.unbind("mouseout", onMouseOut); - eventHolder.unbind("mousemove", onMouseMove); - }); - } - - $.plot.plugins.push({ - init: init, - options: options, - name: 'crosshair', - version: '1.0' - }); -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.errorbars.js b/src/legacy/ui/public/flot-charts/jquery.flot.errorbars.js deleted file mode 100644 index 655036e0db846..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.errorbars.js +++ /dev/null @@ -1,353 +0,0 @@ -/* Flot plugin for plotting error bars. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -Error bars are used to show standard deviation and other statistical -properties in a plot. - -* Created by Rui Pereira - rui (dot) pereira (at) gmail (dot) com - -This plugin allows you to plot error-bars over points. Set "errorbars" inside -the points series to the axis name over which there will be error values in -your data array (*even* if you do not intend to plot them later, by setting -"show: null" on xerr/yerr). 
- -The plugin supports these options: - - series: { - points: { - errorbars: "x" or "y" or "xy", - xerr: { - show: null/false or true, - asymmetric: null/false or true, - upperCap: null or "-" or function, - lowerCap: null or "-" or function, - color: null or color, - radius: null or number - }, - yerr: { same options as xerr } - } - } - -Each data point array is expected to be of the type: - - "x" [ x, y, xerr ] - "y" [ x, y, yerr ] - "xy" [ x, y, xerr, yerr ] - -Where xerr becomes xerr_lower,xerr_upper for the asymmetric error case, and -equivalently for yerr. E.g., a datapoint for the "xy" case with symmetric -error-bars on X and asymmetric on Y would be: - - [ x, y, xerr, yerr_lower, yerr_upper ] - -By default no end caps are drawn. Setting upperCap and/or lowerCap to "-" will -draw a small cap perpendicular to the error bar. They can also be set to a -user-defined drawing function, with (ctx, x, y, radius) as parameters, as e.g.: - - function drawSemiCircle( ctx, x, y, radius ) { - ctx.beginPath(); - ctx.arc( x, y, radius, 0, Math.PI, false ); - ctx.moveTo( x - radius, y ); - ctx.lineTo( x + radius, y ); - ctx.stroke(); - } - -Color and radius both default to the same ones of the points series if not -set. The independent radius parameter on xerr/yerr is useful for the case when -we may want to add error-bars to a line, without showing the interconnecting -points (with radius: 0), and still showing end caps on the error-bars. -shadowSize and lineWidth are derived as well from the points series. - -*/ - -(function ($) { - var options = { - series: { - points: { - errorbars: null, //should be 'x', 'y' or 'xy' - xerr: { err: 'x', show: null, asymmetric: null, upperCap: null, lowerCap: null, color: null, radius: null}, - yerr: { err: 'y', show: null, asymmetric: null, upperCap: null, lowerCap: null, color: null, radius: null} - } - } - }; - - function processRawData(plot, series, data, datapoints){ - if (!series.points.errorbars) - return; - - // x,y values - var format = [ - { x: true, number: true, required: true }, - { y: true, number: true, required: true } - ]; - - var errors = series.points.errorbars; - // error bars - first X then Y - if (errors == 'x' || errors == 'xy') { - // lower / upper error - if (series.points.xerr.asymmetric) { - format.push({ x: true, number: true, required: true }); - format.push({ x: true, number: true, required: true }); - } else - format.push({ x: true, number: true, required: true }); - } - if (errors == 'y' || errors == 'xy') { - // lower / upper error - if (series.points.yerr.asymmetric) { - format.push({ y: true, number: true, required: true }); - format.push({ y: true, number: true, required: true }); - } else - format.push({ y: true, number: true, required: true }); - } - datapoints.format = format; - } - - function parseErrors(series, i){ - - var points = series.datapoints.points; - - // read errors from points array - var exl = null, - exu = null, - eyl = null, - eyu = null; - var xerr = series.points.xerr, - yerr = series.points.yerr; - - var eb = series.points.errorbars; - // error bars - first X - if (eb == 'x' || eb == 'xy') { - if (xerr.asymmetric) { - exl = points[i + 2]; - exu = points[i + 3]; - if (eb == 'xy') - if (yerr.asymmetric){ - eyl = points[i + 4]; - eyu = points[i + 5]; - } else eyl = points[i + 4]; - } else { - exl = points[i + 2]; - if (eb == 'xy') - if (yerr.asymmetric) { - eyl = points[i + 3]; - eyu = points[i + 4]; - } else eyl = points[i + 3]; - } - // only Y - } else if (eb == 'y') - if (yerr.asymmetric) { - eyl = 
points[i + 2]; - eyu = points[i + 3]; - } else eyl = points[i + 2]; - - // symmetric errors? - if (exu == null) exu = exl; - if (eyu == null) eyu = eyl; - - var errRanges = [exl, exu, eyl, eyu]; - // nullify if not showing - if (!xerr.show){ - errRanges[0] = null; - errRanges[1] = null; - } - if (!yerr.show){ - errRanges[2] = null; - errRanges[3] = null; - } - return errRanges; - } - - function drawSeriesErrors(plot, ctx, s){ - - var points = s.datapoints.points, - ps = s.datapoints.pointsize, - ax = [s.xaxis, s.yaxis], - radius = s.points.radius, - err = [s.points.xerr, s.points.yerr]; - - //sanity check, in case some inverted axis hack is applied to flot - var invertX = false; - if (ax[0].p2c(ax[0].max) < ax[0].p2c(ax[0].min)) { - invertX = true; - var tmp = err[0].lowerCap; - err[0].lowerCap = err[0].upperCap; - err[0].upperCap = tmp; - } - - var invertY = false; - if (ax[1].p2c(ax[1].min) < ax[1].p2c(ax[1].max)) { - invertY = true; - var tmp = err[1].lowerCap; - err[1].lowerCap = err[1].upperCap; - err[1].upperCap = tmp; - } - - for (var i = 0; i < s.datapoints.points.length; i += ps) { - - //parse - var errRanges = parseErrors(s, i); - - //cycle xerr & yerr - for (var e = 0; e < err.length; e++){ - - var minmax = [ax[e].min, ax[e].max]; - - //draw this error? - if (errRanges[e * err.length]){ - - //data coordinates - var x = points[i], - y = points[i + 1]; - - //errorbar ranges - var upper = [x, y][e] + errRanges[e * err.length + 1], - lower = [x, y][e] - errRanges[e * err.length]; - - //points outside of the canvas - if (err[e].err == 'x') - if (y > ax[1].max || y < ax[1].min || upper < ax[0].min || lower > ax[0].max) - continue; - if (err[e].err == 'y') - if (x > ax[0].max || x < ax[0].min || upper < ax[1].min || lower > ax[1].max) - continue; - - // prevent errorbars getting out of the canvas - var drawUpper = true, - drawLower = true; - - if (upper > minmax[1]) { - drawUpper = false; - upper = minmax[1]; - } - if (lower < minmax[0]) { - drawLower = false; - lower = minmax[0]; - } - - //sanity check, in case some inverted axis hack is applied to flot - if ((err[e].err == 'x' && invertX) || (err[e].err == 'y' && invertY)) { - //swap coordinates - var tmp = lower; - lower = upper; - upper = tmp; - tmp = drawLower; - drawLower = drawUpper; - drawUpper = tmp; - tmp = minmax[0]; - minmax[0] = minmax[1]; - minmax[1] = tmp; - } - - // convert to pixels - x = ax[0].p2c(x), - y = ax[1].p2c(y), - upper = ax[e].p2c(upper); - lower = ax[e].p2c(lower); - minmax[0] = ax[e].p2c(minmax[0]); - minmax[1] = ax[e].p2c(minmax[1]); - - //same style as points by default - var lw = err[e].lineWidth ? err[e].lineWidth : s.points.lineWidth, - sw = s.points.shadowSize != null ? s.points.shadowSize : s.shadowSize; - - //shadow as for points - if (lw > 0 && sw > 0) { - var w = sw / 2; - ctx.lineWidth = w; - ctx.strokeStyle = "rgba(0,0,0,0.1)"; - drawError(ctx, err[e], x, y, upper, lower, drawUpper, drawLower, radius, w + w/2, minmax); - - ctx.strokeStyle = "rgba(0,0,0,0.2)"; - drawError(ctx, err[e], x, y, upper, lower, drawUpper, drawLower, radius, w/2, minmax); - } - - ctx.strokeStyle = err[e].color? 
err[e].color: s.color; - ctx.lineWidth = lw; - //draw it - drawError(ctx, err[e], x, y, upper, lower, drawUpper, drawLower, radius, 0, minmax); - } - } - } - } - - function drawError(ctx,err,x,y,upper,lower,drawUpper,drawLower,radius,offset,minmax){ - - //shadow offset - y += offset; - upper += offset; - lower += offset; - - // error bar - avoid plotting over circles - if (err.err == 'x'){ - if (upper > x + radius) drawPath(ctx, [[upper,y],[Math.max(x + radius,minmax[0]),y]]); - else drawUpper = false; - if (lower < x - radius) drawPath(ctx, [[Math.min(x - radius,minmax[1]),y],[lower,y]] ); - else drawLower = false; - } - else { - if (upper < y - radius) drawPath(ctx, [[x,upper],[x,Math.min(y - radius,minmax[0])]] ); - else drawUpper = false; - if (lower > y + radius) drawPath(ctx, [[x,Math.max(y + radius,minmax[1])],[x,lower]] ); - else drawLower = false; - } - - //internal radius value in errorbar, allows to plot radius 0 points and still keep proper sized caps - //this is a way to get errorbars on lines without visible connecting dots - radius = err.radius != null? err.radius: radius; - - // upper cap - if (drawUpper) { - if (err.upperCap == '-'){ - if (err.err=='x') drawPath(ctx, [[upper,y - radius],[upper,y + radius]] ); - else drawPath(ctx, [[x - radius,upper],[x + radius,upper]] ); - } else if ($.isFunction(err.upperCap)){ - if (err.err=='x') err.upperCap(ctx, upper, y, radius); - else err.upperCap(ctx, x, upper, radius); - } - } - // lower cap - if (drawLower) { - if (err.lowerCap == '-'){ - if (err.err=='x') drawPath(ctx, [[lower,y - radius],[lower,y + radius]] ); - else drawPath(ctx, [[x - radius,lower],[x + radius,lower]] ); - } else if ($.isFunction(err.lowerCap)){ - if (err.err=='x') err.lowerCap(ctx, lower, y, radius); - else err.lowerCap(ctx, x, lower, radius); - } - } - } - - function drawPath(ctx, pts){ - ctx.beginPath(); - ctx.moveTo(pts[0][0], pts[0][1]); - for (var p=1; p < pts.length; p++) - ctx.lineTo(pts[p][0], pts[p][1]); - ctx.stroke(); - } - - function draw(plot, ctx){ - var plotOffset = plot.getPlotOffset(); - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - $.each(plot.getData(), function (i, s) { - if (s.points.errorbars && (s.points.xerr.show || s.points.yerr.show)) - drawSeriesErrors(plot, ctx, s); - }); - ctx.restore(); - } - - function init(plot) { - plot.hooks.processRawData.push(processRawData); - plot.hooks.draw.push(draw); - } - - $.plot.plugins.push({ - init: init, - options: options, - name: 'errorbars', - version: '1.0' - }); -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.fillbetween.js b/src/legacy/ui/public/flot-charts/jquery.flot.fillbetween.js deleted file mode 100644 index 18b15d26db8c9..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.fillbetween.js +++ /dev/null @@ -1,226 +0,0 @@ -/* Flot plugin for computing bottoms for filled line and bar charts. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -The case: you've got two series that you want to fill the area between. In Flot -terms, you need to use one as the fill bottom of the other. You can specify the -bottom of each data point as the third coordinate manually, or you can use this -plugin to compute it for you. - -In order to name the other series, you need to give it an id, like this: - - var dataset = [ - { data: [ ... ], id: "foo" } , // use default bottom - { data: [ ... 
], fillBetween: "foo" }, // use first dataset as bottom - ]; - - $.plot($("#placeholder"), dataset, { lines: { show: true, fill: true }}); - -As a convenience, if the id given is a number that doesn't appear as an id in -the series, it is interpreted as the index in the array instead (so fillBetween: -0 can also mean the first series). - -Internally, the plugin modifies the datapoints in each series. For line series, -extra data points might be inserted through interpolation. Note that at points -where the bottom line is not defined (due to a null point or start/end of line), -the current line will show a gap too. The algorithm comes from the -jquery.flot.stack.js plugin, possibly some code could be shared. - -*/ - -(function ( $ ) { - - var options = { - series: { - fillBetween: null // or number - } - }; - - function init( plot ) { - - function findBottomSeries( s, allseries ) { - - var i; - - for ( i = 0; i < allseries.length; ++i ) { - if ( allseries[ i ].id === s.fillBetween ) { - return allseries[ i ]; - } - } - - if ( typeof s.fillBetween === "number" ) { - if ( s.fillBetween < 0 || s.fillBetween >= allseries.length ) { - return null; - } - return allseries[ s.fillBetween ]; - } - - return null; - } - - function computeFillBottoms( plot, s, datapoints ) { - - if ( s.fillBetween == null ) { - return; - } - - var other = findBottomSeries( s, plot.getData() ); - - if ( !other ) { - return; - } - - var ps = datapoints.pointsize, - points = datapoints.points, - otherps = other.datapoints.pointsize, - otherpoints = other.datapoints.points, - newpoints = [], - px, py, intery, qx, qy, bottom, - withlines = s.lines.show, - withbottom = ps > 2 && datapoints.format[2].y, - withsteps = withlines && s.lines.steps, - fromgap = true, - i = 0, - j = 0, - l, m; - - while ( true ) { - - if ( i >= points.length ) { - break; - } - - l = newpoints.length; - - if ( points[ i ] == null ) { - - // copy gaps - - for ( m = 0; m < ps; ++m ) { - newpoints.push( points[ i + m ] ); - } - - i += ps; - - } else if ( j >= otherpoints.length ) { - - // for lines, we can't use the rest of the points - - if ( !withlines ) { - for ( m = 0; m < ps; ++m ) { - newpoints.push( points[ i + m ] ); - } - } - - i += ps; - - } else if ( otherpoints[ j ] == null ) { - - // oops, got a gap - - for ( m = 0; m < ps; ++m ) { - newpoints.push( null ); - } - - fromgap = true; - j += otherps; - - } else { - - // cases where we actually got two points - - px = points[ i ]; - py = points[ i + 1 ]; - qx = otherpoints[ j ]; - qy = otherpoints[ j + 1 ]; - bottom = 0; - - if ( px === qx ) { - - for ( m = 0; m < ps; ++m ) { - newpoints.push( points[ i + m ] ); - } - - //newpoints[ l + 1 ] += qy; - bottom = qy; - - i += ps; - j += otherps; - - } else if ( px > qx ) { - - // we got past point below, might need to - // insert interpolated extra point - - if ( withlines && i > 0 && points[ i - ps ] != null ) { - intery = py + ( points[ i - ps + 1 ] - py ) * ( qx - px ) / ( points[ i - ps ] - px ); - newpoints.push( qx ); - newpoints.push( intery ); - for ( m = 2; m < ps; ++m ) { - newpoints.push( points[ i + m ] ); - } - bottom = qy; - } - - j += otherps; - - } else { // px < qx - - // if we come from a gap, we just skip this point - - if ( fromgap && withlines ) { - i += ps; - continue; - } - - for ( m = 0; m < ps; ++m ) { - newpoints.push( points[ i + m ] ); - } - - // we might be able to interpolate a point below, - // this can give us a better y - - if ( withlines && j > 0 && otherpoints[ j - otherps ] != null ) { - bottom = qy + ( 
otherpoints[ j - otherps + 1 ] - qy ) * ( px - qx ) / ( otherpoints[ j - otherps ] - qx ); - } - - //newpoints[l + 1] += bottom; - - i += ps; - } - - fromgap = false; - - if ( l !== newpoints.length && withbottom ) { - newpoints[ l + 2 ] = bottom; - } - } - - // maintain the line steps invariant - - if ( withsteps && l !== newpoints.length && l > 0 && - newpoints[ l ] !== null && - newpoints[ l ] !== newpoints[ l - ps ] && - newpoints[ l + 1 ] !== newpoints[ l - ps + 1 ] ) { - for (m = 0; m < ps; ++m) { - newpoints[ l + ps + m ] = newpoints[ l + m ]; - } - newpoints[ l + 1 ] = newpoints[ l - ps + 1 ]; - } - } - - datapoints.points = newpoints; - } - - plot.hooks.processDatapoints.push( computeFillBottoms ); - } - - $.plot.plugins.push({ - init: init, - options: options, - name: "fillbetween", - version: "1.0" - }); - -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.image.js b/src/legacy/ui/public/flot-charts/jquery.flot.image.js deleted file mode 100644 index 178f0e69069ef..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.image.js +++ /dev/null @@ -1,241 +0,0 @@ -/* Flot plugin for plotting images. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -The data syntax is [ [ image, x1, y1, x2, y2 ], ... ] where (x1, y1) and -(x2, y2) are where you intend the two opposite corners of the image to end up -in the plot. Image must be a fully loaded JavaScript image (you can make one -with new Image()). If the image is not complete, it's skipped when plotting. - -There are two helpers included for retrieving images. The easiest work the way -that you put in URLs instead of images in the data, like this: - - [ "myimage.png", 0, 0, 10, 10 ] - -Then call $.plot.image.loadData( data, options, callback ) where data and -options are the same as you pass in to $.plot. This loads the images, replaces -the URLs in the data with the corresponding images and calls "callback" when -all images are loaded (or failed loading). In the callback, you can then call -$.plot with the data set. See the included example. - -A more low-level helper, $.plot.image.load(urls, callback) is also included. -Given a list of URLs, it calls callback with an object mapping from URL to -Image object when all images are loaded or have failed loading. - -The plugin supports these options: - - series: { - images: { - show: boolean - anchor: "corner" or "center" - alpha: [ 0, 1 ] - } - } - -They can be specified for a specific series: - - $.plot( $("#placeholder"), [{ - data: [ ... ], - images: { ... } - ]) - -Note that because the data format is different from usual data points, you -can't use images with anything else in a specific data series. - -Setting "anchor" to "center" causes the pixels in the image to be anchored at -the corner pixel centers inside of at the pixel corners, effectively letting -half a pixel stick out to each side in the plot. - -A possible future direction could be support for tiling for large images (like -Google Maps). 
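A small editorial usage sketch of the loader workflow described above (the placeholder element and image file name are hypothetical); note that the implementation below actually registers the helper as $.plot.image.loadDataImages rather than loadData:

    var data    = [ [ [ "myimage.png", 0, 0, 10, 10 ] ] ];
    var options = { series: { images: { show: true } } };

    $.plot.image.loadDataImages( data, options, function () {
        // URLs in data have been replaced by loaded Image objects
        $.plot( $("#placeholder"), data, options );
    });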
- -*/ - -(function ($) { - var options = { - series: { - images: { - show: false, - alpha: 1, - anchor: "corner" // or "center" - } - } - }; - - $.plot.image = {}; - - $.plot.image.loadDataImages = function (series, options, callback) { - var urls = [], points = []; - - var defaultShow = options.series.images.show; - - $.each(series, function (i, s) { - if (!(defaultShow || s.images.show)) - return; - - if (s.data) - s = s.data; - - $.each(s, function (i, p) { - if (typeof p[0] == "string") { - urls.push(p[0]); - points.push(p); - } - }); - }); - - $.plot.image.load(urls, function (loadedImages) { - $.each(points, function (i, p) { - var url = p[0]; - if (loadedImages[url]) - p[0] = loadedImages[url]; - }); - - callback(); - }); - } - - $.plot.image.load = function (urls, callback) { - var missing = urls.length, loaded = {}; - if (missing == 0) - callback({}); - - $.each(urls, function (i, url) { - var handler = function () { - --missing; - - loaded[url] = this; - - if (missing == 0) - callback(loaded); - }; - - $('').load(handler).error(handler).attr('src', url); - }); - }; - - function drawSeries(plot, ctx, series) { - var plotOffset = plot.getPlotOffset(); - - if (!series.images || !series.images.show) - return; - - var points = series.datapoints.points, - ps = series.datapoints.pointsize; - - for (var i = 0; i < points.length; i += ps) { - var img = points[i], - x1 = points[i + 1], y1 = points[i + 2], - x2 = points[i + 3], y2 = points[i + 4], - xaxis = series.xaxis, yaxis = series.yaxis, - tmp; - - // actually we should check img.complete, but it - // appears to be a somewhat unreliable indicator in - // IE6 (false even after load event) - if (!img || img.width <= 0 || img.height <= 0) - continue; - - if (x1 > x2) { - tmp = x2; - x2 = x1; - x1 = tmp; - } - if (y1 > y2) { - tmp = y2; - y2 = y1; - y1 = tmp; - } - - // if the anchor is at the center of the pixel, expand the - // image by 1/2 pixel in each direction - if (series.images.anchor == "center") { - tmp = 0.5 * (x2-x1) / (img.width - 1); - x1 -= tmp; - x2 += tmp; - tmp = 0.5 * (y2-y1) / (img.height - 1); - y1 -= tmp; - y2 += tmp; - } - - // clip - if (x1 == x2 || y1 == y2 || - x1 >= xaxis.max || x2 <= xaxis.min || - y1 >= yaxis.max || y2 <= yaxis.min) - continue; - - var sx1 = 0, sy1 = 0, sx2 = img.width, sy2 = img.height; - if (x1 < xaxis.min) { - sx1 += (sx2 - sx1) * (xaxis.min - x1) / (x2 - x1); - x1 = xaxis.min; - } - - if (x2 > xaxis.max) { - sx2 += (sx2 - sx1) * (xaxis.max - x2) / (x2 - x1); - x2 = xaxis.max; - } - - if (y1 < yaxis.min) { - sy2 += (sy1 - sy2) * (yaxis.min - y1) / (y2 - y1); - y1 = yaxis.min; - } - - if (y2 > yaxis.max) { - sy1 += (sy1 - sy2) * (yaxis.max - y2) / (y2 - y1); - y2 = yaxis.max; - } - - x1 = xaxis.p2c(x1); - x2 = xaxis.p2c(x2); - y1 = yaxis.p2c(y1); - y2 = yaxis.p2c(y2); - - // the transformation may have swapped us - if (x1 > x2) { - tmp = x2; - x2 = x1; - x1 = tmp; - } - if (y1 > y2) { - tmp = y2; - y2 = y1; - y1 = tmp; - } - - tmp = ctx.globalAlpha; - ctx.globalAlpha *= series.images.alpha; - ctx.drawImage(img, - sx1, sy1, sx2 - sx1, sy2 - sy1, - x1 + plotOffset.left, y1 + plotOffset.top, - x2 - x1, y2 - y1); - ctx.globalAlpha = tmp; - } - } - - function processRawData(plot, series, data, datapoints) { - if (!series.images.show) - return; - - // format is Image, x1, y1, x2, y2 (opposite corners) - datapoints.format = [ - { required: true }, - { x: true, number: true, required: true }, - { y: true, number: true, required: true }, - { x: true, number: true, required: true }, - { y: true, 
number: true, required: true } - ]; - } - - function init(plot) { - plot.hooks.processRawData.push(processRawData); - plot.hooks.drawSeries.push(drawSeries); - } - - $.plot.plugins.push({ - init: init, - options: options, - name: 'image', - version: '1.1' - }); -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.js b/src/legacy/ui/public/flot-charts/jquery.flot.js deleted file mode 100644 index 43db1cc3d93db..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.js +++ /dev/null @@ -1,3168 +0,0 @@ -/* JavaScript plotting library for jQuery, version 0.8.3. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -*/ - -// first an inline dependency, jquery.colorhelpers.js, we inline it here -// for convenience - -/* Plugin for jQuery for working with colors. - * - * Version 1.1. - * - * Inspiration from jQuery color animation plugin by John Resig. - * - * Released under the MIT license by Ole Laursen, October 2009. - * - * Examples: - * - * $.color.parse("#fff").scale('rgb', 0.25).add('a', -0.5).toString() - * var c = $.color.extract($("#mydiv"), 'background-color'); - * console.log(c.r, c.g, c.b, c.a); - * $.color.make(100, 50, 25, 0.4).toString() // returns "rgba(100,50,25,0.4)" - * - * Note that .scale() and .add() return the same modified object - * instead of making a new one. - * - * V. 1.1: Fix error handling so e.g. parsing an empty string does - * produce a color rather than just crashing. - */ -(function($){$.color={};$.color.make=function(r,g,b,a){var o={};o.r=r||0;o.g=g||0;o.b=b||0;o.a=a!=null?a:1;o.add=function(c,d){for(var i=0;i=1){return"rgb("+[o.r,o.g,o.b].join(",")+")"}else{return"rgba("+[o.r,o.g,o.b,o.a].join(",")+")"}};o.normalize=function(){function clamp(min,value,max){return valuemax?max:value}o.r=clamp(0,parseInt(o.r),255);o.g=clamp(0,parseInt(o.g),255);o.b=clamp(0,parseInt(o.b),255);o.a=clamp(0,o.a,1);return o};o.clone=function(){return $.color.make(o.r,o.b,o.g,o.a)};return o.normalize()};$.color.extract=function(elem,css){var c;do{c=elem.css(css).toLowerCase();if(c!=""&&c!="transparent")break;elem=elem.parent()}while(elem.length&&!$.nodeName(elem.get(0),"body"));if(c=="rgba(0, 0, 0, 0)")c="transparent";return $.color.parse(c)};$.color.parse=function(str){var res,m=$.color.make;if(res=/rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(str))return m(parseInt(res[1],10),parseInt(res[2],10),parseInt(res[3],10));if(res=/rgba\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(str))return m(parseInt(res[1],10),parseInt(res[2],10),parseInt(res[3],10),parseFloat(res[4]));if(res=/rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(str))return m(parseFloat(res[1])*2.55,parseFloat(res[2])*2.55,parseFloat(res[3])*2.55);if(res=/rgba\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(str))return m(parseFloat(res[1])*2.55,parseFloat(res[2])*2.55,parseFloat(res[3])*2.55,parseFloat(res[4]));if(res=/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(str))return m(parseInt(res[1],16),parseInt(res[2],16),parseInt(res[3],16));if(res=/#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(str))return m(parseInt(res[1]+res[1],16),parseInt(res[2]+res[2],16),parseInt(res[3]+res[3],16));var name=$.trim(str).toLowerCase();if(name=="transparent")return m(255,255,255,0);else{res=lookupColors[name]||[0,0,0];return m(res[0],res[1],res[2])}};var 
lookupColors={aqua:[0,255,255],azure:[240,255,255],beige:[245,245,220],black:[0,0,0],blue:[0,0,255],brown:[165,42,42],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgrey:[169,169,169],darkgreen:[0,100,0],darkkhaki:[189,183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkviolet:[148,0,211],fuchsia:[255,0,255],gold:[255,215,0],green:[0,128,0],indigo:[75,0,130],khaki:[240,230,140],lightblue:[173,216,230],lightcyan:[224,255,255],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightyellow:[255,255,224],lime:[0,255,0],magenta:[255,0,255],maroon:[128,0,0],navy:[0,0,128],olive:[128,128,0],orange:[255,165,0],pink:[255,192,203],purple:[128,0,128],violet:[128,0,128],red:[255,0,0],silver:[192,192,192],white:[255,255,255],yellow:[255,255,0]}})(jQuery); - -// the actual Flot code -(function($) { - - // Cache the prototype hasOwnProperty for faster access - - var hasOwnProperty = Object.prototype.hasOwnProperty; - - // A shim to provide 'detach' to jQuery versions prior to 1.4. Using a DOM - // operation produces the same effect as detach, i.e. removing the element - // without touching its jQuery data. - - // Do not merge this into Flot 0.9, since it requires jQuery 1.4.4+. - - if (!$.fn.detach) { - $.fn.detach = function() { - return this.each(function() { - if (this.parentNode) { - this.parentNode.removeChild( this ); - } - }); - }; - } - - /////////////////////////////////////////////////////////////////////////// - // The Canvas object is a wrapper around an HTML5 tag. - // - // @constructor - // @param {string} cls List of classes to apply to the canvas. - // @param {element} container Element onto which to append the canvas. - // - // Requiring a container is a little iffy, but unfortunately canvas - // operations don't work unless the canvas is attached to the DOM. - - function Canvas(cls, container) { - - var element = container.children("." + cls)[0]; - - if (element == null) { - - element = document.createElement("canvas"); - element.className = cls; - - $(element).css({ direction: "ltr", position: "absolute", left: 0, top: 0 }) - .appendTo(container); - - // If HTML5 Canvas isn't available, fall back to [Ex|Flash]canvas - - if (!element.getContext) { - if (window.G_vmlCanvasManager) { - element = window.G_vmlCanvasManager.initElement(element); - } else { - throw new Error("Canvas is not available. If you're using IE with a fall-back such as Excanvas, then there's either a mistake in your conditional include, or the page has no DOCTYPE and is rendering in Quirks Mode."); - } - } - } - - this.element = element; - - var context = this.context = element.getContext("2d"); - - // Determine the screen's ratio of physical to device-independent - // pixels. This is the ratio between the canvas width that the browser - // advertises and the number of pixels actually present in that space. - - // The iPhone 4, for example, has a device-independent width of 320px, - // but its screen is actually 640px wide. It therefore has a pixel - // ratio of 2, while most normal devices have a ratio of 1. 
- - var devicePixelRatio = window.devicePixelRatio || 1, - backingStoreRatio = - context.webkitBackingStorePixelRatio || - context.mozBackingStorePixelRatio || - context.msBackingStorePixelRatio || - context.oBackingStorePixelRatio || - context.backingStorePixelRatio || 1; - - this.pixelRatio = devicePixelRatio / backingStoreRatio; - - // Size the canvas to match the internal dimensions of its container - - this.resize(container.width(), container.height()); - - // Collection of HTML div layers for text overlaid onto the canvas - - this.textContainer = null; - this.text = {}; - - // Cache of text fragments and metrics, so we can avoid expensively - // re-calculating them when the plot is re-rendered in a loop. - - this._textCache = {}; - } - - // Resizes the canvas to the given dimensions. - // - // @param {number} width New width of the canvas, in pixels. - // @param {number} width New height of the canvas, in pixels. - - Canvas.prototype.resize = function(width, height) { - - if (width <= 0 || height <= 0) { - throw new Error("Invalid dimensions for plot, width = " + width + ", height = " + height); - } - - var element = this.element, - context = this.context, - pixelRatio = this.pixelRatio; - - // Resize the canvas, increasing its density based on the display's - // pixel ratio; basically giving it more pixels without increasing the - // size of its element, to take advantage of the fact that retina - // displays have that many more pixels in the same advertised space. - - // Resizing should reset the state (excanvas seems to be buggy though) - - if (this.width != width) { - element.width = width * pixelRatio; - element.style.width = width + "px"; - this.width = width; - } - - if (this.height != height) { - element.height = height * pixelRatio; - element.style.height = height + "px"; - this.height = height; - } - - // Save the context, so we can reset in case we get replotted. The - // restore ensure that we're really back at the initial state, and - // should be safe even if we haven't saved the initial state yet. - - context.restore(); - context.save(); - - // Scale the coordinate space to match the display density; so even though we - // may have twice as many pixels, we still want lines and other drawing to - // appear at the same size; the extra pixels will just make them crisper. - - context.scale(pixelRatio, pixelRatio); - }; - - // Clears the entire canvas area, not including any overlaid HTML text - - Canvas.prototype.clear = function() { - this.context.clearRect(0, 0, this.width, this.height); - }; - - // Finishes rendering the canvas, including managing the text overlay. - - Canvas.prototype.render = function() { - - var cache = this._textCache; - - // For each text layer, add elements marked as active that haven't - // already been rendered, and remove those that are no longer active. 
- - for (var layerKey in cache) { - if (hasOwnProperty.call(cache, layerKey)) { - - var layer = this.getTextLayer(layerKey), - layerCache = cache[layerKey]; - - layer.hide(); - - for (var styleKey in layerCache) { - if (hasOwnProperty.call(layerCache, styleKey)) { - var styleCache = layerCache[styleKey]; - for (var key in styleCache) { - if (hasOwnProperty.call(styleCache, key)) { - - var positions = styleCache[key].positions; - - for (var i = 0, position; position = positions[i]; i++) { - if (position.active) { - if (!position.rendered) { - layer.append(position.element); - position.rendered = true; - } - } else { - positions.splice(i--, 1); - if (position.rendered) { - position.element.detach(); - } - } - } - - if (positions.length == 0) { - delete styleCache[key]; - } - } - } - } - } - - layer.show(); - } - } - }; - - // Creates (if necessary) and returns the text overlay container. - // - // @param {string} classes String of space-separated CSS classes used to - // uniquely identify the text layer. - // @return {object} The jQuery-wrapped text-layer div. - - Canvas.prototype.getTextLayer = function(classes) { - - var layer = this.text[classes]; - - // Create the text layer if it doesn't exist - - if (layer == null) { - - // Create the text layer container, if it doesn't exist - - if (this.textContainer == null) { - this.textContainer = $("
") - .css({ - position: "absolute", - top: 0, - left: 0, - bottom: 0, - right: 0, - 'font-size': "smaller", - color: "#545454" - }) - .insertAfter(this.element); - } - - layer = this.text[classes] = $("
") - .addClass(classes) - .css({ - position: "absolute", - top: 0, - left: 0, - bottom: 0, - right: 0 - }) - .appendTo(this.textContainer); - } - - return layer; - }; - - // Creates (if necessary) and returns a text info object. - // - // The object looks like this: - // - // { - // width: Width of the text's wrapper div. - // height: Height of the text's wrapper div. - // element: The jQuery-wrapped HTML div containing the text. - // positions: Array of positions at which this text is drawn. - // } - // - // The positions array contains objects that look like this: - // - // { - // active: Flag indicating whether the text should be visible. - // rendered: Flag indicating whether the text is currently visible. - // element: The jQuery-wrapped HTML div containing the text. - // x: X coordinate at which to draw the text. - // y: Y coordinate at which to draw the text. - // } - // - // Each position after the first receives a clone of the original element. - // - // The idea is that that the width, height, and general 'identity' of the - // text is constant no matter where it is placed; the placements are a - // secondary property. - // - // Canvas maintains a cache of recently-used text info objects; getTextInfo - // either returns the cached element or creates a new entry. - // - // @param {string} layer A string of space-separated CSS classes uniquely - // identifying the layer containing this text. - // @param {string} text Text string to retrieve info for. - // @param {(string|object)=} font Either a string of space-separated CSS - // classes or a font-spec object, defining the text's font and style. - // @param {number=} angle Angle at which to rotate the text, in degrees. - // Angle is currently unused, it will be implemented in the future. - // @param {number=} width Maximum width of the text before it wraps. - // @return {object} a text info object. - - Canvas.prototype.getTextInfo = function(layer, text, font, angle, width) { - - var textStyle, layerCache, styleCache, info; - - // Cast the value to a string, in case we were given a number or such - - text = "" + text; - - // If the font is a font-spec object, generate a CSS font definition - - if (typeof font === "object") { - textStyle = font.style + " " + font.variant + " " + font.weight + " " + font.size + "px/" + font.lineHeight + "px " + font.family; - } else { - textStyle = font; - } - - // Retrieve (or create) the cache for the text's layer and styles - - layerCache = this._textCache[layer]; - - if (layerCache == null) { - layerCache = this._textCache[layer] = {}; - } - - styleCache = layerCache[textStyle]; - - if (styleCache == null) { - styleCache = layerCache[textStyle] = {}; - } - - info = styleCache[text]; - - // If we can't find a matching element in our cache, create a new one - - if (info == null) { - - var element = $("
").html(text) - .css({ - position: "absolute", - 'max-width': width, - top: -9999 - }) - .appendTo(this.getTextLayer(layer)); - - if (typeof font === "object") { - element.css({ - font: textStyle, - color: font.color - }); - } else if (typeof font === "string") { - element.addClass(font); - } - - info = styleCache[text] = { - width: element.outerWidth(true), - height: element.outerHeight(true), - element: element, - positions: [] - }; - - element.detach(); - } - - return info; - }; - - // Adds a text string to the canvas text overlay. - // - // The text isn't drawn immediately; it is marked as rendering, which will - // result in its addition to the canvas on the next render pass. - // - // @param {string} layer A string of space-separated CSS classes uniquely - // identifying the layer containing this text. - // @param {number} x X coordinate at which to draw the text. - // @param {number} y Y coordinate at which to draw the text. - // @param {string} text Text string to draw. - // @param {(string|object)=} font Either a string of space-separated CSS - // classes or a font-spec object, defining the text's font and style. - // @param {number=} angle Angle at which to rotate the text, in degrees. - // Angle is currently unused, it will be implemented in the future. - // @param {number=} width Maximum width of the text before it wraps. - // @param {string=} halign Horizontal alignment of the text; either "left", - // "center" or "right". - // @param {string=} valign Vertical alignment of the text; either "top", - // "middle" or "bottom". - - Canvas.prototype.addText = function(layer, x, y, text, font, angle, width, halign, valign) { - - var info = this.getTextInfo(layer, text, font, angle, width), - positions = info.positions; - - // Tweak the div's position to match the text's alignment - - if (halign == "center") { - x -= info.width / 2; - } else if (halign == "right") { - x -= info.width; - } - - if (valign == "middle") { - y -= info.height / 2; - } else if (valign == "bottom") { - y -= info.height; - } - - // Determine whether this text already exists at this position. - // If so, mark it for inclusion in the next render pass. - - for (var i = 0, position; position = positions[i]; i++) { - if (position.x == x && position.y == y) { - position.active = true; - return; - } - } - - // If the text doesn't exist at this position, create a new entry - - // For the very first position we'll re-use the original element, - // while for subsequent ones we'll clone it. - - position = { - active: true, - rendered: false, - element: positions.length ? info.element.clone() : info.element, - x: x, - y: y - }; - - positions.push(position); - - // Move the element to its final position within the container - - position.element.css({ - top: Math.round(y), - left: Math.round(x), - 'text-align': halign // In case the text wraps - }); - }; - - // Removes one or more text strings from the canvas text overlay. - // - // If no parameters are given, all text within the layer is removed. - // - // Note that the text is not immediately removed; it is simply marked as - // inactive, which will result in its removal on the next render pass. - // This avoids the performance penalty for 'clear and redraw' behavior, - // where we potentially get rid of all text on a layer, but will likely - // add back most or all of it later, as when redrawing axes, for example. - // - // @param {string} layer A string of space-separated CSS classes uniquely - // identifying the layer containing this text. 
- // @param {number=} x X coordinate of the text. - // @param {number=} y Y coordinate of the text. - // @param {string=} text Text string to remove. - // @param {(string|object)=} font Either a string of space-separated CSS - // classes or a font-spec object, defining the text's font and style. - // @param {number=} angle Angle at which the text is rotated, in degrees. - // Angle is currently unused, it will be implemented in the future. - - Canvas.prototype.removeText = function(layer, x, y, text, font, angle) { - if (text == null) { - var layerCache = this._textCache[layer]; - if (layerCache != null) { - for (var styleKey in layerCache) { - if (hasOwnProperty.call(layerCache, styleKey)) { - var styleCache = layerCache[styleKey]; - for (var key in styleCache) { - if (hasOwnProperty.call(styleCache, key)) { - var positions = styleCache[key].positions; - for (var i = 0, position; position = positions[i]; i++) { - position.active = false; - } - } - } - } - } - } - } else { - var positions = this.getTextInfo(layer, text, font, angle).positions; - for (var i = 0, position; position = positions[i]; i++) { - if (position.x == x && position.y == y) { - position.active = false; - } - } - } - }; - - /////////////////////////////////////////////////////////////////////////// - // The top-level container for the entire plot. - - function Plot(placeholder, data_, options_, plugins) { - // data is on the form: - // [ series1, series2 ... ] - // where series is either just the data as [ [x1, y1], [x2, y2], ... ] - // or { data: [ [x1, y1], [x2, y2], ... ], label: "some label", ... } - - var series = [], - options = { - // the color theme used for graphs - colors: ["#edc240", "#afd8f8", "#cb4b4b", "#4da74d", "#9440ed"], - legend: { - show: true, - noColumns: 1, // number of columns in legend table - labelFormatter: null, // fn: string -> string - labelBoxBorderColor: "#ccc", // border color for the little label boxes - container: null, // container (as jQuery object) to put legend in, null means default on top of graph - position: "ne", // position of default legend container within plot - margin: 5, // distance from grid edge to default legend container within plot - backgroundColor: null, // null means auto-detect - backgroundOpacity: 0.85, // set to 0 to avoid background - sorted: null // default to no legend sorting - }, - xaxis: { - show: null, // null = auto-detect, true = always, false = never - position: "bottom", // or "top" - mode: null, // null or "time" - font: null, // null (derived from CSS in placeholder) or object like { size: 11, lineHeight: 13, style: "italic", weight: "bold", family: "sans-serif", variant: "small-caps" } - color: null, // base color, labels, ticks - tickColor: null, // possibly different color of ticks, e.g. "rgba(0,0,0,0.15)" - transform: null, // null or f: number -> number to transform axis - inverseTransform: null, // if transform is set, this should be the inverse function - min: null, // min. value to show, null means set automatically - max: null, // max. value to show, null means set automatically - autoscaleMargin: null, // margin in % to add if auto-setting min/max - ticks: null, // either [1, 3] or [[1, "a"], 3] or (fn: axis info -> ticks) or app. 
number of ticks for auto-ticks - tickFormatter: null, // fn: number -> string - labelWidth: null, // size of tick labels in pixels - labelHeight: null, - reserveSpace: null, // whether to reserve space even if axis isn't shown - tickLength: null, // size in pixels of ticks, or "full" for whole line - alignTicksWithAxis: null, // axis number or null for no sync - tickDecimals: null, // no. of decimals, null means auto - tickSize: null, // number or [number, "unit"] - minTickSize: null // number or [number, "unit"] - }, - yaxis: { - autoscaleMargin: 0.02, - position: "left" // or "right" - }, - xaxes: [], - yaxes: [], - series: { - points: { - show: false, - radius: 3, - lineWidth: 2, // in pixels - fill: true, - fillColor: "#ffffff", - symbol: "circle" // or callback - }, - lines: { - // we don't put in show: false so we can see - // whether lines were actively disabled - lineWidth: 2, // in pixels - fill: false, - fillColor: null, - steps: false - // Omit 'zero', so we can later default its value to - // match that of the 'fill' option. - }, - bars: { - show: false, - lineWidth: 2, // in pixels - barWidth: 1, // in units of the x axis - fill: true, - fillColor: null, - align: "left", // "left", "right", or "center" - horizontal: false, - zero: true - }, - shadowSize: 3, - highlightColor: null - }, - grid: { - show: true, - aboveData: false, - color: "#545454", // primary color used for outline and labels - backgroundColor: null, // null for transparent, else color - borderColor: null, // set if different from the grid color - tickColor: null, // color for the ticks, e.g. "rgba(0,0,0,0.15)" - margin: 0, // distance from the canvas edge to the grid - labelMargin: 5, // in pixels - axisMargin: 8, // in pixels - borderWidth: 2, // in pixels - minBorderMargin: null, // in pixels, null means taken from points radius - markings: null, // array of ranges or fn: axes -> array of ranges - markingsColor: "#f4f4f4", - markingsLineWidth: 2, - // interactive stuff - clickable: false, - hoverable: false, - autoHighlight: true, // highlight in case mouse is near - mouseActiveRadius: 10 // how far the mouse can be away to activate an item - }, - interaction: { - redrawOverlayInterval: 1000/60 // time between updates, -1 means in same flow - }, - hooks: {} - }, - surface = null, // the canvas for the plot itself - overlay = null, // canvas for interactive stuff on top of plot - eventHolder = null, // jQuery object that events should be bound to - ctx = null, octx = null, - xaxes = [], yaxes = [], - plotOffset = { left: 0, right: 0, top: 0, bottom: 0}, - plotWidth = 0, plotHeight = 0, - hooks = { - processOptions: [], - processRawData: [], - processDatapoints: [], - processOffset: [], - drawBackground: [], - drawSeries: [], - draw: [], - bindEvents: [], - drawOverlay: [], - shutdown: [] - }, - plot = this; - - // public functions - plot.setData = setData; - plot.setupGrid = setupGrid; - plot.draw = draw; - plot.getPlaceholder = function() { return placeholder; }; - plot.getCanvas = function() { return surface.element; }; - plot.getPlotOffset = function() { return plotOffset; }; - plot.width = function () { return plotWidth; }; - plot.height = function () { return plotHeight; }; - plot.offset = function () { - var o = eventHolder.offset(); - o.left += plotOffset.left; - o.top += plotOffset.top; - return o; - }; - plot.getData = function () { return series; }; - plot.getAxes = function () { - var res = {}, i; - $.each(xaxes.concat(yaxes), function (_, axis) { - if (axis) - res[axis.direction + (axis.n != 1 ? 
axis.n : "") + "axis"] = axis; - }); - return res; - }; - plot.getXAxes = function () { return xaxes; }; - plot.getYAxes = function () { return yaxes; }; - plot.c2p = canvasToAxisCoords; - plot.p2c = axisToCanvasCoords; - plot.getOptions = function () { return options; }; - plot.highlight = highlight; - plot.unhighlight = unhighlight; - plot.triggerRedrawOverlay = triggerRedrawOverlay; - plot.pointOffset = function(point) { - return { - left: parseInt(xaxes[axisNumber(point, "x") - 1].p2c(+point.x) + plotOffset.left, 10), - top: parseInt(yaxes[axisNumber(point, "y") - 1].p2c(+point.y) + plotOffset.top, 10) - }; - }; - plot.shutdown = shutdown; - plot.destroy = function () { - shutdown(); - placeholder.removeData("plot").empty(); - - series = []; - options = null; - surface = null; - overlay = null; - eventHolder = null; - ctx = null; - octx = null; - xaxes = []; - yaxes = []; - hooks = null; - highlights = []; - plot = null; - }; - plot.resize = function () { - var width = placeholder.width(), - height = placeholder.height(); - surface.resize(width, height); - overlay.resize(width, height); - }; - - // public attributes - plot.hooks = hooks; - - // initialize - initPlugins(plot); - parseOptions(options_); - setupCanvases(); - setData(data_); - setupGrid(); - draw(); - bindEvents(); - - - function executeHooks(hook, args) { - args = [plot].concat(args); - for (var i = 0; i < hook.length; ++i) - hook[i].apply(this, args); - } - - function initPlugins() { - - // References to key classes, allowing plugins to modify them - - var classes = { - Canvas: Canvas - }; - - for (var i = 0; i < plugins.length; ++i) { - var p = plugins[i]; - p.init(plot, classes); - if (p.options) - $.extend(true, options, p.options); - } - } - - function parseOptions(opts) { - - $.extend(true, options, opts); - - // $.extend merges arrays, rather than replacing them. When less - // colors are provided than the size of the default palette, we - // end up with those colors plus the remaining defaults, which is - // not expected behavior; avoid it by replacing them here. - - if (opts && opts.colors) { - options.colors = opts.colors; - } - - if (options.xaxis.color == null) - options.xaxis.color = $.color.parse(options.grid.color).scale('a', 0.22).toString(); - if (options.yaxis.color == null) - options.yaxis.color = $.color.parse(options.grid.color).scale('a', 0.22).toString(); - - if (options.xaxis.tickColor == null) // grid.tickColor for back-compatibility - options.xaxis.tickColor = options.grid.tickColor || options.xaxis.color; - if (options.yaxis.tickColor == null) // grid.tickColor for back-compatibility - options.yaxis.tickColor = options.grid.tickColor || options.yaxis.color; - - if (options.grid.borderColor == null) - options.grid.borderColor = options.grid.color; - if (options.grid.tickColor == null) - options.grid.tickColor = $.color.parse(options.grid.color).scale('a', 0.22).toString(); - - // Fill in defaults for axis options, including any unspecified - // font-spec fields, if a font-spec was provided. - - // If no x/y axis options were provided, create one of each anyway, - // since the rest of the code assumes that they exist. - - var i, axisOptions, axisCount, - fontSize = placeholder.css("font-size"), - fontSizeDefault = fontSize ? 
+fontSize.replace("px", "") : 13, - fontDefaults = { - style: placeholder.css("font-style"), - size: Math.round(0.8 * fontSizeDefault), - variant: placeholder.css("font-variant"), - weight: placeholder.css("font-weight"), - family: placeholder.css("font-family") - }; - - axisCount = options.xaxes.length || 1; - for (i = 0; i < axisCount; ++i) { - - axisOptions = options.xaxes[i]; - if (axisOptions && !axisOptions.tickColor) { - axisOptions.tickColor = axisOptions.color; - } - - axisOptions = $.extend(true, {}, options.xaxis, axisOptions); - options.xaxes[i] = axisOptions; - - if (axisOptions.font) { - axisOptions.font = $.extend({}, fontDefaults, axisOptions.font); - if (!axisOptions.font.color) { - axisOptions.font.color = axisOptions.color; - } - if (!axisOptions.font.lineHeight) { - axisOptions.font.lineHeight = Math.round(axisOptions.font.size * 1.15); - } - } - } - - axisCount = options.yaxes.length || 1; - for (i = 0; i < axisCount; ++i) { - - axisOptions = options.yaxes[i]; - if (axisOptions && !axisOptions.tickColor) { - axisOptions.tickColor = axisOptions.color; - } - - axisOptions = $.extend(true, {}, options.yaxis, axisOptions); - options.yaxes[i] = axisOptions; - - if (axisOptions.font) { - axisOptions.font = $.extend({}, fontDefaults, axisOptions.font); - if (!axisOptions.font.color) { - axisOptions.font.color = axisOptions.color; - } - if (!axisOptions.font.lineHeight) { - axisOptions.font.lineHeight = Math.round(axisOptions.font.size * 1.15); - } - } - } - - // backwards compatibility, to be removed in future - if (options.xaxis.noTicks && options.xaxis.ticks == null) - options.xaxis.ticks = options.xaxis.noTicks; - if (options.yaxis.noTicks && options.yaxis.ticks == null) - options.yaxis.ticks = options.yaxis.noTicks; - if (options.x2axis) { - options.xaxes[1] = $.extend(true, {}, options.xaxis, options.x2axis); - options.xaxes[1].position = "top"; - // Override the inherit to allow the axis to auto-scale - if (options.x2axis.min == null) { - options.xaxes[1].min = null; - } - if (options.x2axis.max == null) { - options.xaxes[1].max = null; - } - } - if (options.y2axis) { - options.yaxes[1] = $.extend(true, {}, options.yaxis, options.y2axis); - options.yaxes[1].position = "right"; - // Override the inherit to allow the axis to auto-scale - if (options.y2axis.min == null) { - options.yaxes[1].min = null; - } - if (options.y2axis.max == null) { - options.yaxes[1].max = null; - } - } - if (options.grid.coloredAreas) - options.grid.markings = options.grid.coloredAreas; - if (options.grid.coloredAreasColor) - options.grid.markingsColor = options.grid.coloredAreasColor; - if (options.lines) - $.extend(true, options.series.lines, options.lines); - if (options.points) - $.extend(true, options.series.points, options.points); - if (options.bars) - $.extend(true, options.series.bars, options.bars); - if (options.shadowSize != null) - options.series.shadowSize = options.shadowSize; - if (options.highlightColor != null) - options.series.highlightColor = options.highlightColor; - - // save options on axes for future reference - for (i = 0; i < options.xaxes.length; ++i) - getOrCreateAxis(xaxes, i + 1).options = options.xaxes[i]; - for (i = 0; i < options.yaxes.length; ++i) - getOrCreateAxis(yaxes, i + 1).options = options.yaxes[i]; - - // add hooks from options - for (var n in hooks) - if (options.hooks[n] && options.hooks[n].length) - hooks[n] = hooks[n].concat(options.hooks[n]); - - executeHooks(hooks.processOptions, [options]); - } - - function setData(d) { - series = parseData(d); - 
fillInSeriesOptions(); - processData(); - } - - function parseData(d) { - var res = []; - for (var i = 0; i < d.length; ++i) { - var s = $.extend(true, {}, options.series); - - if (d[i].data != null) { - s.data = d[i].data; // move the data instead of deep-copy - delete d[i].data; - - $.extend(true, s, d[i]); - - d[i].data = s.data; - } - else - s.data = d[i]; - res.push(s); - } - - return res; - } - - function axisNumber(obj, coord) { - var a = obj[coord + "axis"]; - if (typeof a == "object") // if we got a real axis, extract number - a = a.n; - if (typeof a != "number") - a = 1; // default to first axis - return a; - } - - function allAxes() { - // return flat array without annoying null entries - return $.grep(xaxes.concat(yaxes), function (a) { return a; }); - } - - function canvasToAxisCoords(pos) { - // return an object with x/y corresponding to all used axes - var res = {}, i, axis; - for (i = 0; i < xaxes.length; ++i) { - axis = xaxes[i]; - if (axis && axis.used) - res["x" + axis.n] = axis.c2p(pos.left); - } - - for (i = 0; i < yaxes.length; ++i) { - axis = yaxes[i]; - if (axis && axis.used) - res["y" + axis.n] = axis.c2p(pos.top); - } - - if (res.x1 !== undefined) - res.x = res.x1; - if (res.y1 !== undefined) - res.y = res.y1; - - return res; - } - - function axisToCanvasCoords(pos) { - // get canvas coords from the first pair of x/y found in pos - var res = {}, i, axis, key; - - for (i = 0; i < xaxes.length; ++i) { - axis = xaxes[i]; - if (axis && axis.used) { - key = "x" + axis.n; - if (pos[key] == null && axis.n == 1) - key = "x"; - - if (pos[key] != null) { - res.left = axis.p2c(pos[key]); - break; - } - } - } - - for (i = 0; i < yaxes.length; ++i) { - axis = yaxes[i]; - if (axis && axis.used) { - key = "y" + axis.n; - if (pos[key] == null && axis.n == 1) - key = "y"; - - if (pos[key] != null) { - res.top = axis.p2c(pos[key]); - break; - } - } - } - - return res; - } - - function getOrCreateAxis(axes, number) { - if (!axes[number - 1]) - axes[number - 1] = { - n: number, // save the number for future reference - direction: axes == xaxes ? "x" : "y", - options: $.extend(true, {}, axes == xaxes ? options.xaxis : options.yaxis) - }; - - return axes[number - 1]; - } - - function fillInSeriesOptions() { - - var neededColors = series.length, maxIndex = -1, i; - - // Subtract the number of series that already have fixed colors or - // color indexes from the number that we still need to generate. - - for (i = 0; i < series.length; ++i) { - var sc = series[i].color; - if (sc != null) { - neededColors--; - if (typeof sc == "number" && sc > maxIndex) { - maxIndex = sc; - } - } - } - - // If any of the series have fixed color indexes, then we need to - // generate at least as many colors as the highest index. - - if (neededColors <= maxIndex) { - neededColors = maxIndex + 1; - } - - // Generate all the colors, using first the option colors and then - // variations on those colors once they're exhausted. - - var c, colors = [], colorPool = options.colors, - colorPoolSize = colorPool.length, variation = 0; - - for (i = 0; i < neededColors; i++) { - - c = $.color.parse(colorPool[i % colorPoolSize] || "#666"); - - // Each time we exhaust the colors in the pool we adjust - // a scaling factor used to produce more variations on - // those colors. The factor alternates negative/positive - // to produce lighter/darker colors. - - // Reset the variation after every few cycles, or else - // it will end up producing only white or black colors. 
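The colour cycling described in the comments above, and implemented in the loop that follows, reuses the option palette once it runs out, alternately darkening and lightening the repeats so they stay distinguishable. A rough standalone sketch of that scheme; it returns each base colour plus a scale factor rather than doing the RGB math, which flot delegates to $.color, and the function name is illustrative:

function generateSeriesColors(colorPool, neededColors) {
    var colors = [];
    var variation = 0;

    for (var i = 0; i < neededColors; i++) {
        if (i % colorPool.length === 0 && i) {
            // Pool exhausted again: flip the lighten/darken factor, resetting it
            // periodically so repeated cycles never drift to pure white or black.
            if (variation >= 0) {
                variation = variation < 0.5 ? -variation - 0.2 : 0;
            } else {
                variation = -variation;
            }
        }
        colors.push({
            base: colorPool[i % colorPool.length] || "#666",
            scale: 1 + variation // below 1 darkens, above 1 lightens the base colour
        });
    }

    return colors;
}

// generateSeriesColors(["#edc240", "#afd8f8"], 5) yields the two pool colours as-is,
// then both scaled by 0.8 (darker), then the first scaled by 1.2 (lighter).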
- - if (i % colorPoolSize == 0 && i) { - if (variation >= 0) { - if (variation < 0.5) { - variation = -variation - 0.2; - } else variation = 0; - } else variation = -variation; - } - - colors[i] = c.scale('rgb', 1 + variation); - } - - // Finalize the series options, filling in their colors - - var colori = 0, s; - for (i = 0; i < series.length; ++i) { - s = series[i]; - - // assign colors - if (s.color == null) { - s.color = colors[colori].toString(); - ++colori; - } - else if (typeof s.color == "number") - s.color = colors[s.color].toString(); - - // turn on lines automatically in case nothing is set - if (s.lines.show == null) { - var v, show = true; - for (v in s) - if (s[v] && s[v].show) { - show = false; - break; - } - if (show) - s.lines.show = true; - } - - // If nothing was provided for lines.zero, default it to match - // lines.fill, since areas by default should extend to zero. - - if (s.lines.zero == null) { - s.lines.zero = !!s.lines.fill; - } - - // setup axes - s.xaxis = getOrCreateAxis(xaxes, axisNumber(s, "x")); - s.yaxis = getOrCreateAxis(yaxes, axisNumber(s, "y")); - } - } - - function processData() { - var topSentry = Number.POSITIVE_INFINITY, - bottomSentry = Number.NEGATIVE_INFINITY, - fakeInfinity = Number.MAX_VALUE, - i, j, k, m, length, - s, points, ps, x, y, axis, val, f, p, - data, format; - - function updateAxis(axis, min, max) { - if (min < axis.datamin && min != -fakeInfinity) - axis.datamin = min; - if (max > axis.datamax && max != fakeInfinity) - axis.datamax = max; - } - - $.each(allAxes(), function (_, axis) { - // init axis - axis.datamin = topSentry; - axis.datamax = bottomSentry; - axis.used = false; - }); - - for (i = 0; i < series.length; ++i) { - s = series[i]; - s.datapoints = { points: [] }; - - executeHooks(hooks.processRawData, [ s, s.data, s.datapoints ]); - } - - // first pass: clean and copy data - for (i = 0; i < series.length; ++i) { - s = series[i]; - - data = s.data; - format = s.datapoints.format; - - if (!format) { - format = []; - // find out how to copy - format.push({ x: true, number: true, required: true }); - format.push({ y: true, number: true, required: true }); - - if (s.bars.show || (s.lines.show && s.lines.fill)) { - var autoscale = !!((s.bars.show && s.bars.zero) || (s.lines.show && s.lines.zero)); - format.push({ y: true, number: true, required: false, defaultValue: 0, autoscale: autoscale }); - if (s.bars.horizontal) { - delete format[format.length - 1].y; - format[format.length - 1].x = true; - } - } - - s.datapoints.format = format; - } - - if (s.datapoints.pointsize != null) - continue; // already filled in - - s.datapoints.pointsize = format.length; - - ps = s.datapoints.pointsize; - points = s.datapoints.points; - - var insertSteps = s.lines.show && s.lines.steps; - s.xaxis.used = s.yaxis.used = true; - - for (j = k = 0; j < data.length; ++j, k += ps) { - p = data[j]; - - var nullify = p == null; - if (!nullify) { - for (m = 0; m < ps; ++m) { - val = p[m]; - f = format[m]; - - if (f) { - if (f.number && val != null) { - val = +val; // convert to number - if (isNaN(val)) - val = null; - else if (val == Infinity) - val = fakeInfinity; - else if (val == -Infinity) - val = -fakeInfinity; - } - - if (val == null) { - if (f.required) - nullify = true; - - if (f.defaultValue != null) - val = f.defaultValue; - } - } - - points[k + m] = val; - } - } - - if (nullify) { - for (m = 0; m < ps; ++m) { - val = points[k + m]; - if (val != null) { - f = format[m]; - // extract min/max info - if (f.autoscale !== false) { - if (f.x) { - 
updateAxis(s.xaxis, val, val); - } - if (f.y) { - updateAxis(s.yaxis, val, val); - } - } - } - points[k + m] = null; - } - } - else { - // a little bit of line specific stuff that - // perhaps shouldn't be here, but lacking - // better means... - if (insertSteps && k > 0 - && points[k - ps] != null - && points[k - ps] != points[k] - && points[k - ps + 1] != points[k + 1]) { - // copy the point to make room for a middle point - for (m = 0; m < ps; ++m) - points[k + ps + m] = points[k + m]; - - // middle point has same y - points[k + 1] = points[k - ps + 1]; - - // we've added a point, better reflect that - k += ps; - } - } - } - } - - // give the hooks a chance to run - for (i = 0; i < series.length; ++i) { - s = series[i]; - - executeHooks(hooks.processDatapoints, [ s, s.datapoints]); - } - - // second pass: find datamax/datamin for auto-scaling - for (i = 0; i < series.length; ++i) { - s = series[i]; - points = s.datapoints.points; - ps = s.datapoints.pointsize; - format = s.datapoints.format; - - var xmin = topSentry, ymin = topSentry, - xmax = bottomSentry, ymax = bottomSentry; - - for (j = 0; j < points.length; j += ps) { - if (points[j] == null) - continue; - - for (m = 0; m < ps; ++m) { - val = points[j + m]; - f = format[m]; - if (!f || f.autoscale === false || val == fakeInfinity || val == -fakeInfinity) - continue; - - if (f.x) { - if (val < xmin) - xmin = val; - if (val > xmax) - xmax = val; - } - if (f.y) { - if (val < ymin) - ymin = val; - if (val > ymax) - ymax = val; - } - } - } - - if (s.bars.show) { - // make sure we got room for the bar on the dancing floor - var delta; - - switch (s.bars.align) { - case "left": - delta = 0; - break; - case "right": - delta = -s.bars.barWidth; - break; - default: - delta = -s.bars.barWidth / 2; - } - - if (s.bars.horizontal) { - ymin += delta; - ymax += delta + s.bars.barWidth; - } - else { - xmin += delta; - xmax += delta + s.bars.barWidth; - } - } - - updateAxis(s.xaxis, xmin, xmax); - updateAxis(s.yaxis, ymin, ymax); - } - - $.each(allAxes(), function (_, axis) { - if (axis.datamin == topSentry) - axis.datamin = null; - if (axis.datamax == bottomSentry) - axis.datamax = null; - }); - } - - function setupCanvases() { - - // Make sure the placeholder is clear of everything except canvases - // from a previous plot in this container that we'll try to re-use. - - placeholder.css("padding", 0) // padding messes up the positioning - .children().filter(function(){ - return !$(this).hasClass("flot-overlay") && !$(this).hasClass('flot-base'); - }).remove(); - - if (placeholder.css("position") == 'static') - placeholder.css("position", "relative"); // for positioning labels and overlay - - surface = new Canvas("flot-base", placeholder); - overlay = new Canvas("flot-overlay", placeholder); // overlay canvas for interactive features - - ctx = surface.context; - octx = overlay.context; - - // define which element we're listening for events on - eventHolder = $(overlay.element).unbind(); - - // If we're re-using a plot object, shut down the old one - - var existing = placeholder.data("plot"); - - if (existing) { - existing.shutdown(); - overlay.clear(); - } - - // save in case we get replotted - placeholder.data("plot", plot); - } - - function bindEvents() { - // bind events - if (options.grid.hoverable) { - eventHolder.mousemove(onMouseMove); - - // Use bind, rather than .mouseleave, because we officially - // still support jQuery 1.2.6, which doesn't define a shortcut - // for mouseenter or mouseleave. 
This was a bug/oversight that - // was fixed somewhere around 1.3.x. We can return to using - // .mouseleave when we drop support for 1.2.6. - - eventHolder.bind("mouseleave", onMouseLeave); - } - - if (options.grid.clickable) - eventHolder.click(onClick); - - executeHooks(hooks.bindEvents, [eventHolder]); - } - - function shutdown() { - if (redrawTimeout) - clearTimeout(redrawTimeout); - - eventHolder.unbind("mousemove", onMouseMove); - eventHolder.unbind("mouseleave", onMouseLeave); - eventHolder.unbind("click", onClick); - - executeHooks(hooks.shutdown, [eventHolder]); - } - - function setTransformationHelpers(axis) { - // set helper functions on the axis, assumes plot area - // has been computed already - - function identity(x) { return x; } - - var s, m, t = axis.options.transform || identity, - it = axis.options.inverseTransform; - - // precompute how much the axis is scaling a point - // in canvas space - if (axis.direction == "x") { - s = axis.scale = plotWidth / Math.abs(t(axis.max) - t(axis.min)); - m = Math.min(t(axis.max), t(axis.min)); - } - else { - s = axis.scale = plotHeight / Math.abs(t(axis.max) - t(axis.min)); - s = -s; - m = Math.max(t(axis.max), t(axis.min)); - } - - // data point to canvas coordinate - if (t == identity) // slight optimization - axis.p2c = function (p) { return (p - m) * s; }; - else - axis.p2c = function (p) { return (t(p) - m) * s; }; - // canvas coordinate to data point - if (!it) - axis.c2p = function (c) { return m + c / s; }; - else - axis.c2p = function (c) { return it(m + c / s); }; - } - - function measureTickLabels(axis) { - - var opts = axis.options, - ticks = axis.ticks || [], - labelWidth = opts.labelWidth || 0, - labelHeight = opts.labelHeight || 0, - maxWidth = labelWidth || (axis.direction == "x" ? Math.floor(surface.width / (ticks.length || 1)) : null), - legacyStyles = axis.direction + "Axis " + axis.direction + axis.n + "Axis", - layer = "flot-" + axis.direction + "-axis flot-" + axis.direction + axis.n + "-axis " + legacyStyles, - font = opts.font || "flot-tick-label tickLabel"; - - for (var i = 0; i < ticks.length; ++i) { - - var t = ticks[i]; - - if (!t.label) - continue; - - var info = surface.getTextInfo(layer, t.label, font, null, maxWidth); - - labelWidth = Math.max(labelWidth, info.width); - labelHeight = Math.max(labelHeight, info.height); - } - - axis.labelWidth = opts.labelWidth || labelWidth; - axis.labelHeight = opts.labelHeight || labelHeight; - } - - function allocateAxisBoxFirstPhase(axis) { - // find the bounding box of the axis by looking at label - // widths/heights and ticks, make room by diminishing the - // plotOffset; this first phase only looks at one - // dimension per axis, the other dimension depends on the - // other axes so will have to wait - - var lw = axis.labelWidth, - lh = axis.labelHeight, - pos = axis.options.position, - isXAxis = axis.direction === "x", - tickLength = axis.options.tickLength, - axisMargin = options.grid.axisMargin, - padding = options.grid.labelMargin, - innermost = true, - outermost = true, - first = true, - found = false; - - // Determine the axis's position in its direction and on its side - - $.each(isXAxis ? 
xaxes : yaxes, function(i, a) { - if (a && (a.show || a.reserveSpace)) { - if (a === axis) { - found = true; - } else if (a.options.position === pos) { - if (found) { - outermost = false; - } else { - innermost = false; - } - } - if (!found) { - first = false; - } - } - }); - - // The outermost axis on each side has no margin - - if (outermost) { - axisMargin = 0; - } - - // The ticks for the first axis in each direction stretch across - - if (tickLength == null) { - tickLength = first ? "full" : 5; - } - - if (!isNaN(+tickLength)) - padding += +tickLength; - - if (isXAxis) { - lh += padding; - - if (pos == "bottom") { - plotOffset.bottom += lh + axisMargin; - axis.box = { top: surface.height - plotOffset.bottom, height: lh }; - } - else { - axis.box = { top: plotOffset.top + axisMargin, height: lh }; - plotOffset.top += lh + axisMargin; - } - } - else { - lw += padding; - - if (pos == "left") { - axis.box = { left: plotOffset.left + axisMargin, width: lw }; - plotOffset.left += lw + axisMargin; - } - else { - plotOffset.right += lw + axisMargin; - axis.box = { left: surface.width - plotOffset.right, width: lw }; - } - } - - // save for future reference - axis.position = pos; - axis.tickLength = tickLength; - axis.box.padding = padding; - axis.innermost = innermost; - } - - function allocateAxisBoxSecondPhase(axis) { - // now that all axis boxes have been placed in one - // dimension, we can set the remaining dimension coordinates - if (axis.direction == "x") { - axis.box.left = plotOffset.left - axis.labelWidth / 2; - axis.box.width = surface.width - plotOffset.left - plotOffset.right + axis.labelWidth; - } - else { - axis.box.top = plotOffset.top - axis.labelHeight / 2; - axis.box.height = surface.height - plotOffset.bottom - plotOffset.top + axis.labelHeight; - } - } - - function adjustLayoutForThingsStickingOut() { - // possibly adjust plot offset to ensure everything stays - // inside the canvas and isn't clipped off - - var minMargin = options.grid.minBorderMargin, - axis, i; - - // check stuff from the plot (FIXME: this should just read - // a value from the series, otherwise it's impossible to - // customize) - if (minMargin == null) { - minMargin = 0; - for (i = 0; i < series.length; ++i) - minMargin = Math.max(minMargin, 2 * (series[i].points.radius + series[i].points.lineWidth/2)); - } - - var margins = { - left: minMargin, - right: minMargin, - top: minMargin, - bottom: minMargin - }; - - // check axis labels, note we don't check the actual - // labels but instead use the overall width/height to not - // jump as much around with replots - $.each(allAxes(), function (_, axis) { - if (axis.reserveSpace && axis.ticks && axis.ticks.length) { - if (axis.direction === "x") { - margins.left = Math.max(margins.left, axis.labelWidth / 2); - margins.right = Math.max(margins.right, axis.labelWidth / 2); - } else { - margins.bottom = Math.max(margins.bottom, axis.labelHeight / 2); - margins.top = Math.max(margins.top, axis.labelHeight / 2); - } - } - }); - - plotOffset.left = Math.ceil(Math.max(margins.left, plotOffset.left)); - plotOffset.right = Math.ceil(Math.max(margins.right, plotOffset.right)); - plotOffset.top = Math.ceil(Math.max(margins.top, plotOffset.top)); - plotOffset.bottom = Math.ceil(Math.max(margins.bottom, plotOffset.bottom)); - } - - function setupGrid() { - var i, axes = allAxes(), showGrid = options.grid.show; - - // Initialize the plot's offset from the edge of the canvas - - for (var a in plotOffset) { - var margin = options.grid.margin || 0; - plotOffset[a] = typeof 
margin == "number" ? margin : margin[a] || 0; - } - - executeHooks(hooks.processOffset, [plotOffset]); - - // If the grid is visible, add its border width to the offset - - for (var a in plotOffset) { - if(typeof(options.grid.borderWidth) == "object") { - plotOffset[a] += showGrid ? options.grid.borderWidth[a] : 0; - } - else { - plotOffset[a] += showGrid ? options.grid.borderWidth : 0; - } - } - - $.each(axes, function (_, axis) { - var axisOpts = axis.options; - axis.show = axisOpts.show == null ? axis.used : axisOpts.show; - axis.reserveSpace = axisOpts.reserveSpace == null ? axis.show : axisOpts.reserveSpace; - setRange(axis); - }); - - if (showGrid) { - - var allocatedAxes = $.grep(axes, function (axis) { - return axis.show || axis.reserveSpace; - }); - - $.each(allocatedAxes, function (_, axis) { - // make the ticks - setupTickGeneration(axis); - setTicks(axis); - snapRangeToTicks(axis, axis.ticks); - // find labelWidth/Height for axis - measureTickLabels(axis); - }); - - // with all dimensions calculated, we can compute the - // axis bounding boxes, start from the outside - // (reverse order) - for (i = allocatedAxes.length - 1; i >= 0; --i) - allocateAxisBoxFirstPhase(allocatedAxes[i]); - - // make sure we've got enough space for things that - // might stick out - adjustLayoutForThingsStickingOut(); - - $.each(allocatedAxes, function (_, axis) { - allocateAxisBoxSecondPhase(axis); - }); - } - - plotWidth = surface.width - plotOffset.left - plotOffset.right; - plotHeight = surface.height - plotOffset.bottom - plotOffset.top; - - // now we got the proper plot dimensions, we can compute the scaling - $.each(axes, function (_, axis) { - setTransformationHelpers(axis); - }); - - if (showGrid) { - drawAxisLabels(); - } - - insertLegend(); - } - - function setRange(axis) { - var opts = axis.options, - min = +(opts.min != null ? opts.min : axis.datamin), - max = +(opts.max != null ? opts.max : axis.datamax), - delta = max - min; - - if (delta == 0.0) { - // degenerate case - var widen = max == 0 ? 1 : 0.01; - - if (opts.min == null) - min -= widen; - // always widen max if we couldn't widen min to ensure we - // don't fall into min == max which doesn't work - if (opts.max == null || opts.min != null) - max += widen; - } - else { - // consider autoscaling - var margin = opts.autoscaleMargin; - if (margin != null) { - if (opts.min == null) { - min -= delta * margin; - // make sure we don't go below zero if all values - // are positive - if (min < 0 && axis.datamin != null && axis.datamin >= 0) - min = 0; - } - if (opts.max == null) { - max += delta * margin; - if (max > 0 && axis.datamax != null && axis.datamax <= 0) - max = 0; - } - } - } - axis.min = min; - axis.max = max; - } - - function setupTickGeneration(axis) { - var opts = axis.options; - - // estimate number of ticks - var noTicks; - if (typeof opts.ticks == "number" && opts.ticks > 0) - noTicks = opts.ticks; - else - // heuristic based on the model a*sqrt(x) fitted to - // some data points that seemed reasonable - noTicks = 0.3 * Math.sqrt(axis.direction == "x" ? 
surface.width : surface.height); - - var delta = (axis.max - axis.min) / noTicks, - dec = -Math.floor(Math.log(delta) / Math.LN10), - maxDec = opts.tickDecimals; - - if (maxDec != null && dec > maxDec) { - dec = maxDec; - } - - var magn = Math.pow(10, -dec), - norm = delta / magn, // norm is between 1.0 and 10.0 - size; - - if (norm < 1.5) { - size = 1; - } else if (norm < 3) { - size = 2; - // special case for 2.5, requires an extra decimal - if (norm > 2.25 && (maxDec == null || dec + 1 <= maxDec)) { - size = 2.5; - ++dec; - } - } else if (norm < 7.5) { - size = 5; - } else { - size = 10; - } - - size *= magn; - - if (opts.minTickSize != null && size < opts.minTickSize) { - size = opts.minTickSize; - } - - axis.delta = delta; - axis.tickDecimals = Math.max(0, maxDec != null ? maxDec : dec); - axis.tickSize = opts.tickSize || size; - - // Time mode was moved to a plug-in in 0.8, and since so many people use it - // we'll add an especially friendly reminder to make sure they included it. - - if (opts.mode == "time" && !axis.tickGenerator) { - throw new Error("Time mode requires the flot.time plugin."); - } - - // Flot supports base-10 axes; any other mode else is handled by a plug-in, - // like flot.time.js. - - if (!axis.tickGenerator) { - - axis.tickGenerator = function (axis) { - - var ticks = [], - start = floorInBase(axis.min, axis.tickSize), - i = 0, - v = Number.NaN, - prev; - - do { - prev = v; - v = start + i * axis.tickSize; - ticks.push(v); - ++i; - } while (v < axis.max && v != prev); - return ticks; - }; - - axis.tickFormatter = function (value, axis) { - - var factor = axis.tickDecimals ? Math.pow(10, axis.tickDecimals) : 1; - var formatted = "" + Math.round(value * factor) / factor; - - // If tickDecimals was specified, ensure that we have exactly that - // much precision; otherwise default to the value's own precision. - - if (axis.tickDecimals != null) { - var decimal = formatted.indexOf("."); - var precision = decimal == -1 ? 0 : formatted.length - decimal - 1; - if (precision < axis.tickDecimals) { - return (precision ? formatted : formatted + ".") + ("" + factor).substr(1, axis.tickDecimals - precision); - } - } - - return formatted; - }; - } - - if ($.isFunction(opts.tickFormatter)) - axis.tickFormatter = function (v, axis) { return "" + opts.tickFormatter(v, axis); }; - - if (opts.alignTicksWithAxis != null) { - var otherAxis = (axis.direction == "x" ? 
xaxes : yaxes)[opts.alignTicksWithAxis - 1]; - if (otherAxis && otherAxis.used && otherAxis != axis) { - // consider snapping min/max to outermost nice ticks - var niceTicks = axis.tickGenerator(axis); - if (niceTicks.length > 0) { - if (opts.min == null) - axis.min = Math.min(axis.min, niceTicks[0]); - if (opts.max == null && niceTicks.length > 1) - axis.max = Math.max(axis.max, niceTicks[niceTicks.length - 1]); - } - - axis.tickGenerator = function (axis) { - // copy ticks, scaled to this axis - var ticks = [], v, i; - for (i = 0; i < otherAxis.ticks.length; ++i) { - v = (otherAxis.ticks[i].v - otherAxis.min) / (otherAxis.max - otherAxis.min); - v = axis.min + v * (axis.max - axis.min); - ticks.push(v); - } - return ticks; - }; - - // we might need an extra decimal since forced - // ticks don't necessarily fit naturally - if (!axis.mode && opts.tickDecimals == null) { - var extraDec = Math.max(0, -Math.floor(Math.log(axis.delta) / Math.LN10) + 1), - ts = axis.tickGenerator(axis); - - // only proceed if the tick interval rounded - // with an extra decimal doesn't give us a - // zero at end - if (!(ts.length > 1 && /\..*0$/.test((ts[1] - ts[0]).toFixed(extraDec)))) - axis.tickDecimals = extraDec; - } - } - } - } - - function setTicks(axis) { - var oticks = axis.options.ticks, ticks = []; - if (oticks == null || (typeof oticks == "number" && oticks > 0)) - ticks = axis.tickGenerator(axis); - else if (oticks) { - if ($.isFunction(oticks)) - // generate the ticks - ticks = oticks(axis); - else - ticks = oticks; - } - - // clean up/labelify the supplied ticks, copy them over - var i, v; - axis.ticks = []; - for (i = 0; i < ticks.length; ++i) { - var label = null; - var t = ticks[i]; - if (typeof t == "object") { - v = +t[0]; - if (t.length > 1) - label = t[1]; - } - else - v = +t; - if (label == null) - label = axis.tickFormatter(v, axis); - if (!isNaN(v)) - axis.ticks.push({ v: v, label: label }); - } - } - - function snapRangeToTicks(axis, ticks) { - if (axis.options.autoscaleMargin && ticks.length > 0) { - // snap to ticks - if (axis.options.min == null) - axis.min = Math.min(axis.min, ticks[0].v); - if (axis.options.max == null && ticks.length > 1) - axis.max = Math.max(axis.max, ticks[ticks.length - 1].v); - } - } - - function draw() { - - surface.clear(); - - executeHooks(hooks.drawBackground, [ctx]); - - var grid = options.grid; - - // draw background, if any - if (grid.show && grid.backgroundColor) - drawBackground(); - - if (grid.show && !grid.aboveData) { - drawGrid(); - } - - for (var i = 0; i < series.length; ++i) { - executeHooks(hooks.drawSeries, [ctx, series[i]]); - drawSeries(series[i]); - } - - executeHooks(hooks.draw, [ctx]); - - if (grid.show && grid.aboveData) { - drawGrid(); - } - - surface.render(); - - // A draw implies that either the axes or data have changed, so we - // should probably update the overlay highlights as well. - - triggerRedrawOverlay(); - } - - function extractRange(ranges, coord) { - var axis, from, to, key, axes = allAxes(); - - for (var i = 0; i < axes.length; ++i) { - axis = axes[i]; - if (axis.direction == coord) { - key = coord + axis.n + "axis"; - if (!ranges[key] && axis.n == 1) - key = coord + "axis"; // support x1axis as xaxis - if (ranges[key]) { - from = ranges[key].from; - to = ranges[key].to; - break; - } - } - } - - // backwards-compat stuff - to be removed in future - if (!ranges[key]) { - axis = coord == "x" ? 
xaxes[0] : yaxes[0]; - from = ranges[coord + "1"]; - to = ranges[coord + "2"]; - } - - // auto-reverse as an added bonus - if (from != null && to != null && from > to) { - var tmp = from; - from = to; - to = tmp; - } - - return { from: from, to: to, axis: axis }; - } - - function drawBackground() { - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - - ctx.fillStyle = getColorOrGradient(options.grid.backgroundColor, plotHeight, 0, "rgba(255, 255, 255, 0)"); - ctx.fillRect(0, 0, plotWidth, plotHeight); - ctx.restore(); - } - - function drawGrid() { - var i, axes, bw, bc; - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - - // draw markings - var markings = options.grid.markings; - if (markings) { - if ($.isFunction(markings)) { - axes = plot.getAxes(); - // xmin etc. is backwards compatibility, to be - // removed in the future - axes.xmin = axes.xaxis.min; - axes.xmax = axes.xaxis.max; - axes.ymin = axes.yaxis.min; - axes.ymax = axes.yaxis.max; - - markings = markings(axes); - } - - for (i = 0; i < markings.length; ++i) { - var m = markings[i], - xrange = extractRange(m, "x"), - yrange = extractRange(m, "y"); - - // fill in missing - if (xrange.from == null) - xrange.from = xrange.axis.min; - if (xrange.to == null) - xrange.to = xrange.axis.max; - if (yrange.from == null) - yrange.from = yrange.axis.min; - if (yrange.to == null) - yrange.to = yrange.axis.max; - - // clip - if (xrange.to < xrange.axis.min || xrange.from > xrange.axis.max || - yrange.to < yrange.axis.min || yrange.from > yrange.axis.max) - continue; - - xrange.from = Math.max(xrange.from, xrange.axis.min); - xrange.to = Math.min(xrange.to, xrange.axis.max); - yrange.from = Math.max(yrange.from, yrange.axis.min); - yrange.to = Math.min(yrange.to, yrange.axis.max); - - var xequal = xrange.from === xrange.to, - yequal = yrange.from === yrange.to; - - if (xequal && yequal) { - continue; - } - - // then draw - xrange.from = Math.floor(xrange.axis.p2c(xrange.from)); - xrange.to = Math.floor(xrange.axis.p2c(xrange.to)); - yrange.from = Math.floor(yrange.axis.p2c(yrange.from)); - yrange.to = Math.floor(yrange.axis.p2c(yrange.to)); - - if (xequal || yequal) { - var lineWidth = m.lineWidth || options.grid.markingsLineWidth, - subPixel = lineWidth % 2 ? 0.5 : 0; - ctx.beginPath(); - ctx.strokeStyle = m.color || options.grid.markingsColor; - ctx.lineWidth = lineWidth; - if (xequal) { - ctx.moveTo(xrange.to + subPixel, yrange.from); - ctx.lineTo(xrange.to + subPixel, yrange.to); - } else { - ctx.moveTo(xrange.from, yrange.to + subPixel); - ctx.lineTo(xrange.to, yrange.to + subPixel); - } - ctx.stroke(); - } else { - ctx.fillStyle = m.color || options.grid.markingsColor; - ctx.fillRect(xrange.from, yrange.to, - xrange.to - xrange.from, - yrange.from - yrange.to); - } - } - } - - // draw the ticks - axes = allAxes(); - bw = options.grid.borderWidth; - - for (var j = 0; j < axes.length; ++j) { - var axis = axes[j], box = axis.box, - t = axis.tickLength, x, y, xoff, yoff; - if (!axis.show || axis.ticks.length == 0) - continue; - - ctx.lineWidth = 1; - - // find the edges - if (axis.direction == "x") { - x = 0; - if (t == "full") - y = (axis.position == "top" ? 0 : plotHeight); - else - y = box.top - plotOffset.top + (axis.position == "top" ? box.height : 0); - } - else { - y = 0; - if (t == "full") - x = (axis.position == "left" ? 0 : plotWidth); - else - x = box.left - plotOffset.left + (axis.position == "left" ? 
box.width : 0); - } - - // draw tick bar - if (!axis.innermost) { - ctx.strokeStyle = axis.options.color; - ctx.beginPath(); - xoff = yoff = 0; - if (axis.direction == "x") - xoff = plotWidth + 1; - else - yoff = plotHeight + 1; - - if (ctx.lineWidth == 1) { - if (axis.direction == "x") { - y = Math.floor(y) + 0.5; - } else { - x = Math.floor(x) + 0.5; - } - } - - ctx.moveTo(x, y); - ctx.lineTo(x + xoff, y + yoff); - ctx.stroke(); - } - - // draw ticks - - ctx.strokeStyle = axis.options.tickColor; - - ctx.beginPath(); - for (i = 0; i < axis.ticks.length; ++i) { - var v = axis.ticks[i].v; - - xoff = yoff = 0; - - if (isNaN(v) || v < axis.min || v > axis.max - // skip those lying on the axes if we got a border - || (t == "full" - && ((typeof bw == "object" && bw[axis.position] > 0) || bw > 0) - && (v == axis.min || v == axis.max))) - continue; - - if (axis.direction == "x") { - x = axis.p2c(v); - yoff = t == "full" ? -plotHeight : t; - - if (axis.position == "top") - yoff = -yoff; - } - else { - y = axis.p2c(v); - xoff = t == "full" ? -plotWidth : t; - - if (axis.position == "left") - xoff = -xoff; - } - - if (ctx.lineWidth == 1) { - if (axis.direction == "x") - x = Math.floor(x) + 0.5; - else - y = Math.floor(y) + 0.5; - } - - ctx.moveTo(x, y); - ctx.lineTo(x + xoff, y + yoff); - } - - ctx.stroke(); - } - - - // draw border - if (bw) { - // If either borderWidth or borderColor is an object, then draw the border - // line by line instead of as one rectangle - bc = options.grid.borderColor; - if(typeof bw == "object" || typeof bc == "object") { - if (typeof bw !== "object") { - bw = {top: bw, right: bw, bottom: bw, left: bw}; - } - if (typeof bc !== "object") { - bc = {top: bc, right: bc, bottom: bc, left: bc}; - } - - if (bw.top > 0) { - ctx.strokeStyle = bc.top; - ctx.lineWidth = bw.top; - ctx.beginPath(); - ctx.moveTo(0 - bw.left, 0 - bw.top/2); - ctx.lineTo(plotWidth, 0 - bw.top/2); - ctx.stroke(); - } - - if (bw.right > 0) { - ctx.strokeStyle = bc.right; - ctx.lineWidth = bw.right; - ctx.beginPath(); - ctx.moveTo(plotWidth + bw.right / 2, 0 - bw.top); - ctx.lineTo(plotWidth + bw.right / 2, plotHeight); - ctx.stroke(); - } - - if (bw.bottom > 0) { - ctx.strokeStyle = bc.bottom; - ctx.lineWidth = bw.bottom; - ctx.beginPath(); - ctx.moveTo(plotWidth + bw.right, plotHeight + bw.bottom / 2); - ctx.lineTo(0, plotHeight + bw.bottom / 2); - ctx.stroke(); - } - - if (bw.left > 0) { - ctx.strokeStyle = bc.left; - ctx.lineWidth = bw.left; - ctx.beginPath(); - ctx.moveTo(0 - bw.left/2, plotHeight + bw.bottom); - ctx.lineTo(0- bw.left/2, 0); - ctx.stroke(); - } - } - else { - ctx.lineWidth = bw; - ctx.strokeStyle = options.grid.borderColor; - ctx.strokeRect(-bw/2, -bw/2, plotWidth + bw, plotHeight + bw); - } - } - - ctx.restore(); - } - - function drawAxisLabels() { - - $.each(allAxes(), function (_, axis) { - var box = axis.box, - legacyStyles = axis.direction + "Axis " + axis.direction + axis.n + "Axis", - layer = "flot-" + axis.direction + "-axis flot-" + axis.direction + axis.n + "-axis " + legacyStyles, - font = axis.options.font || "flot-tick-label tickLabel", - tick, x, y, halign, valign; - - // Remove text before checking for axis.show and ticks.length; - // otherwise plugins, like flot-tickrotor, that draw their own - // tick labels will end up with both theirs and the defaults. 
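The Math.floor(...) + 0.5 and lineWidth % 2 ? 0.5 : 0 adjustments in the grid drawing above are the standard crisp-line trick: a 1px canvas stroke centred on an integer coordinate straddles two device pixels and blurs, so odd line widths are nudged onto the pixel centre. A small standalone illustration with hypothetical helper names:

function crisp(coordinate, lineWidth) {
    // Odd widths need to sit on a half pixel, even widths on a whole pixel.
    return lineWidth % 2 ? Math.floor(coordinate) + 0.5 : Math.round(coordinate);
}

function drawVerticalTick(ctx, x, y, length, lineWidth) {
    ctx.lineWidth = lineWidth;
    ctx.beginPath();
    ctx.moveTo(crisp(x, lineWidth), y);
    ctx.lineTo(crisp(x, lineWidth), y + length);
    ctx.stroke();
}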
- - surface.removeText(layer); - - if (!axis.show || axis.ticks.length == 0) - return; - - for (var i = 0; i < axis.ticks.length; ++i) { - - tick = axis.ticks[i]; - if (!tick.label || tick.v < axis.min || tick.v > axis.max) - continue; - - if (axis.direction == "x") { - halign = "center"; - x = plotOffset.left + axis.p2c(tick.v); - if (axis.position == "bottom") { - y = box.top + box.padding; - } else { - y = box.top + box.height - box.padding; - valign = "bottom"; - } - } else { - valign = "middle"; - y = plotOffset.top + axis.p2c(tick.v); - if (axis.position == "left") { - x = box.left + box.width - box.padding; - halign = "right"; - } else { - x = box.left + box.padding; - } - } - - surface.addText(layer, x, y, tick.label, font, null, null, halign, valign); - } - }); - } - - function drawSeries(series) { - if (series.lines.show) - drawSeriesLines(series); - if (series.bars.show) - drawSeriesBars(series); - if (series.points.show) - drawSeriesPoints(series); - } - - function drawSeriesLines(series) { - function plotLine(datapoints, xoffset, yoffset, axisx, axisy) { - var points = datapoints.points, - ps = datapoints.pointsize, - prevx = null, prevy = null; - - ctx.beginPath(); - for (var i = ps; i < points.length; i += ps) { - var x1 = points[i - ps], y1 = points[i - ps + 1], - x2 = points[i], y2 = points[i + 1]; - - if (x1 == null || x2 == null) - continue; - - // clip with ymin - if (y1 <= y2 && y1 < axisy.min) { - if (y2 < axisy.min) - continue; // line segment is outside - // compute new intersection point - x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; - y1 = axisy.min; - } - else if (y2 <= y1 && y2 < axisy.min) { - if (y1 < axisy.min) - continue; - x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; - y2 = axisy.min; - } - - // clip with ymax - if (y1 >= y2 && y1 > axisy.max) { - if (y2 > axisy.max) - continue; - x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; - y1 = axisy.max; - } - else if (y2 >= y1 && y2 > axisy.max) { - if (y1 > axisy.max) - continue; - x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; - y2 = axisy.max; - } - - // clip with xmin - if (x1 <= x2 && x1 < axisx.min) { - if (x2 < axisx.min) - continue; - y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; - x1 = axisx.min; - } - else if (x2 <= x1 && x2 < axisx.min) { - if (x1 < axisx.min) - continue; - y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; - x2 = axisx.min; - } - - // clip with xmax - if (x1 >= x2 && x1 > axisx.max) { - if (x2 > axisx.max) - continue; - y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; - x1 = axisx.max; - } - else if (x2 >= x1 && x2 > axisx.max) { - if (x1 > axisx.max) - continue; - y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; - x2 = axisx.max; - } - - if (x1 != prevx || y1 != prevy) - ctx.moveTo(axisx.p2c(x1) + xoffset, axisy.p2c(y1) + yoffset); - - prevx = x2; - prevy = y2; - ctx.lineTo(axisx.p2c(x2) + xoffset, axisy.p2c(y2) + yoffset); - } - ctx.stroke(); - } - - function plotLineArea(datapoints, axisx, axisy) { - var points = datapoints.points, - ps = datapoints.pointsize, - bottom = Math.min(Math.max(0, axisy.min), axisy.max), - i = 0, top, areaOpen = false, - ypos = 1, segmentStart = 0, segmentEnd = 0; - - // we process each segment in two turns, first forward - // direction to sketch out top, then once we hit the - // end we go backwards to sketch the bottom - while (true) { - if (ps > 0 && i > points.length + ps) - break; - - i += ps; // ps is negative if going backwards - - var x1 = points[i - ps], - y1 = points[i - ps + ypos], - x2 = points[i], y2 = 
points[i + ypos]; - - if (areaOpen) { - if (ps > 0 && x1 != null && x2 == null) { - // at turning point - segmentEnd = i; - ps = -ps; - ypos = 2; - continue; - } - - if (ps < 0 && i == segmentStart + ps) { - // done with the reverse sweep - ctx.fill(); - areaOpen = false; - ps = -ps; - ypos = 1; - i = segmentStart = segmentEnd + ps; - continue; - } - } - - if (x1 == null || x2 == null) - continue; - - // clip x values - - // clip with xmin - if (x1 <= x2 && x1 < axisx.min) { - if (x2 < axisx.min) - continue; - y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; - x1 = axisx.min; - } - else if (x2 <= x1 && x2 < axisx.min) { - if (x1 < axisx.min) - continue; - y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; - x2 = axisx.min; - } - - // clip with xmax - if (x1 >= x2 && x1 > axisx.max) { - if (x2 > axisx.max) - continue; - y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; - x1 = axisx.max; - } - else if (x2 >= x1 && x2 > axisx.max) { - if (x1 > axisx.max) - continue; - y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; - x2 = axisx.max; - } - - if (!areaOpen) { - // open area - ctx.beginPath(); - ctx.moveTo(axisx.p2c(x1), axisy.p2c(bottom)); - areaOpen = true; - } - - // now first check the case where both is outside - if (y1 >= axisy.max && y2 >= axisy.max) { - ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.max)); - ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.max)); - continue; - } - else if (y1 <= axisy.min && y2 <= axisy.min) { - ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.min)); - ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.min)); - continue; - } - - // else it's a bit more complicated, there might - // be a flat maxed out rectangle first, then a - // triangular cutout or reverse; to find these - // keep track of the current x values - var x1old = x1, x2old = x2; - - // clip the y values, without shortcutting, we - // go through all cases in turn - - // clip with ymin - if (y1 <= y2 && y1 < axisy.min && y2 >= axisy.min) { - x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; - y1 = axisy.min; - } - else if (y2 <= y1 && y2 < axisy.min && y1 >= axisy.min) { - x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; - y2 = axisy.min; - } - - // clip with ymax - if (y1 >= y2 && y1 > axisy.max && y2 <= axisy.max) { - x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; - y1 = axisy.max; - } - else if (y2 >= y1 && y2 > axisy.max && y1 <= axisy.max) { - x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; - y2 = axisy.max; - } - - // if the x value was changed we got a rectangle - // to fill - if (x1 != x1old) { - ctx.lineTo(axisx.p2c(x1old), axisy.p2c(y1)); - // it goes to (x1, y1), but we fill that below - } - - // fill triangular section, this sometimes result - // in redundant points if (x1, y1) hasn't changed - // from previous line to, but we just ignore that - ctx.lineTo(axisx.p2c(x1), axisy.p2c(y1)); - ctx.lineTo(axisx.p2c(x2), axisy.p2c(y2)); - - // fill the other rectangle if it's there - if (x2 != x2old) { - ctx.lineTo(axisx.p2c(x2), axisy.p2c(y2)); - ctx.lineTo(axisx.p2c(x2old), axisy.p2c(y2)); - } - } - } - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - ctx.lineJoin = "round"; - - var lw = series.lines.lineWidth, - sw = series.shadowSize; - // FIXME: consider another form of shadow when filling is turned on - if (lw > 0 && sw > 0) { - // draw shadow as a thick and thin line with transparency - ctx.lineWidth = sw; - ctx.strokeStyle = "rgba(0,0,0,0.1)"; - // position shadow at angle from the mid of line - var angle = Math.PI/18; - plotLine(series.datapoints, Math.sin(angle) * 
(lw/2 + sw/2), Math.cos(angle) * (lw/2 + sw/2), series.xaxis, series.yaxis); - ctx.lineWidth = sw/2; - plotLine(series.datapoints, Math.sin(angle) * (lw/2 + sw/4), Math.cos(angle) * (lw/2 + sw/4), series.xaxis, series.yaxis); - } - - ctx.lineWidth = lw; - ctx.strokeStyle = series.color; - var fillStyle = getFillStyle(series.lines, series.color, 0, plotHeight); - if (fillStyle) { - ctx.fillStyle = fillStyle; - plotLineArea(series.datapoints, series.xaxis, series.yaxis); - } - - if (lw > 0) - plotLine(series.datapoints, 0, 0, series.xaxis, series.yaxis); - ctx.restore(); - } - - function drawSeriesPoints(series) { - function plotPoints(datapoints, radius, fillStyle, offset, shadow, axisx, axisy, symbol) { - var points = datapoints.points, ps = datapoints.pointsize; - - for (var i = 0; i < points.length; i += ps) { - var x = points[i], y = points[i + 1]; - if (x == null || x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max) - continue; - - ctx.beginPath(); - x = axisx.p2c(x); - y = axisy.p2c(y) + offset; - if (symbol == "circle") - ctx.arc(x, y, radius, 0, shadow ? Math.PI : Math.PI * 2, false); - else - symbol(ctx, x, y, radius, shadow); - ctx.closePath(); - - if (fillStyle) { - ctx.fillStyle = fillStyle; - ctx.fill(); - } - ctx.stroke(); - } - } - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - - var lw = series.points.lineWidth, - sw = series.shadowSize, - radius = series.points.radius, - symbol = series.points.symbol; - - // If the user sets the line width to 0, we change it to a very - // small value. A line width of 0 seems to force the default of 1. - // Doing the conditional here allows the shadow setting to still be - // optional even with a lineWidth of 0. - - if( lw == 0 ) - lw = 0.0001; - - if (lw > 0 && sw > 0) { - // draw shadow in two steps - var w = sw / 2; - ctx.lineWidth = w; - ctx.strokeStyle = "rgba(0,0,0,0.1)"; - plotPoints(series.datapoints, radius, null, w + w/2, true, - series.xaxis, series.yaxis, symbol); - - ctx.strokeStyle = "rgba(0,0,0,0.2)"; - plotPoints(series.datapoints, radius, null, w/2, true, - series.xaxis, series.yaxis, symbol); - } - - ctx.lineWidth = lw; - ctx.strokeStyle = series.color; - plotPoints(series.datapoints, radius, - getFillStyle(series.points, series.color), 0, false, - series.xaxis, series.yaxis, symbol); - ctx.restore(); - } - - function drawBar(x, y, b, barLeft, barRight, fillStyleCallback, axisx, axisy, c, horizontal, lineWidth) { - var left, right, bottom, top, - drawLeft, drawRight, drawTop, drawBottom, - tmp; - - // in horizontal mode, we start the bar from the left - // instead of from the bottom so it appears to be - // horizontal rather than vertical - if (horizontal) { - drawBottom = drawRight = drawTop = true; - drawLeft = false; - left = b; - right = x; - top = y + barLeft; - bottom = y + barRight; - - // account for negative bars - if (right < left) { - tmp = right; - right = left; - left = tmp; - drawLeft = true; - drawRight = false; - } - } - else { - drawLeft = drawRight = drawTop = true; - drawBottom = false; - left = x + barLeft; - right = x + barRight; - bottom = b; - top = y; - - // account for negative bars - if (top < bottom) { - tmp = top; - top = bottom; - bottom = tmp; - drawBottom = true; - drawTop = false; - } - } - - // clip - if (right < axisx.min || left > axisx.max || - top < axisy.min || bottom > axisy.max) - return; - - if (left < axisx.min) { - left = axisx.min; - drawLeft = false; - } - - if (right > axisx.max) { - right = axisx.max; - drawRight = false; - } - - if (bottom < 
axisy.min) { - bottom = axisy.min; - drawBottom = false; - } - - if (top > axisy.max) { - top = axisy.max; - drawTop = false; - } - - left = axisx.p2c(left); - bottom = axisy.p2c(bottom); - right = axisx.p2c(right); - top = axisy.p2c(top); - - // fill the bar - if (fillStyleCallback) { - c.fillStyle = fillStyleCallback(bottom, top); - c.fillRect(left, top, right - left, bottom - top) - } - - // draw outline - if (lineWidth > 0 && (drawLeft || drawRight || drawTop || drawBottom)) { - c.beginPath(); - - // FIXME: inline moveTo is buggy with excanvas - c.moveTo(left, bottom); - if (drawLeft) - c.lineTo(left, top); - else - c.moveTo(left, top); - if (drawTop) - c.lineTo(right, top); - else - c.moveTo(right, top); - if (drawRight) - c.lineTo(right, bottom); - else - c.moveTo(right, bottom); - if (drawBottom) - c.lineTo(left, bottom); - else - c.moveTo(left, bottom); - c.stroke(); - } - } - - function drawSeriesBars(series) { - function plotBars(datapoints, barLeft, barRight, fillStyleCallback, axisx, axisy) { - var points = datapoints.points, ps = datapoints.pointsize; - - for (var i = 0; i < points.length; i += ps) { - if (points[i] == null) - continue; - drawBar(points[i], points[i + 1], points[i + 2], barLeft, barRight, fillStyleCallback, axisx, axisy, ctx, series.bars.horizontal, series.bars.lineWidth); - } - } - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - - // FIXME: figure out a way to add shadows (for instance along the right edge) - ctx.lineWidth = series.bars.lineWidth; - ctx.strokeStyle = series.color; - - var barLeft; - - switch (series.bars.align) { - case "left": - barLeft = 0; - break; - case "right": - barLeft = -series.bars.barWidth; - break; - default: - barLeft = -series.bars.barWidth / 2; - } - - var fillStyleCallback = series.bars.fill ? function (bottom, top) { return getFillStyle(series.bars, series.color, bottom, top); } : null; - plotBars(series.datapoints, barLeft, barLeft + series.bars.barWidth, fillStyleCallback, series.xaxis, series.yaxis); - ctx.restore(); - } - - function getFillStyle(filloptions, seriesColor, bottom, top) { - var fill = filloptions.fill; - if (!fill) - return null; - - if (filloptions.fillColor) - return getColorOrGradient(filloptions.fillColor, bottom, top, seriesColor); - - var c = $.color.parse(seriesColor); - c.a = typeof fill == "number" ? fill : 0.4; - c.normalize(); - return c.toString(); - } - - function insertLegend() { - - if (options.legend.container != null) { - $(options.legend.container).html(""); - } else { - placeholder.find(".legend").remove(); - } - - if (!options.legend.show) { - return; - } - - var fragments = [], entries = [], rowStarted = false, - lf = options.legend.labelFormatter, s, label; - - // Build a list of legend entries, with each having a label and a color - - for (var i = 0; i < series.length; ++i) { - s = series[i]; - if (s.label) { - label = lf ? lf(s.label, s) : s.label; - if (label) { - entries.push({ - label: label, - color: s.color - }); - } - } - } - - // Sort the legend using either the default or a custom comparator - - if (options.legend.sorted) { - if ($.isFunction(options.legend.sorted)) { - entries.sort(options.legend.sorted); - } else if (options.legend.sorted == "reverse") { - entries.reverse(); - } else { - var ascending = options.legend.sorted != "descending"; - entries.sort(function(a, b) { - return a.label == b.label ? 0 : ( - (a.label < b.label) != ascending ? 
1 : -1 // Logical XOR - ); - }); - } - } - - // Generate markup for the list of entries, in their final order - - for (var i = 0; i < entries.length; ++i) { - - var entry = entries[i]; - - if (i % options.legend.noColumns == 0) { - if (rowStarted) - fragments.push(''); - fragments.push(''); - rowStarted = true; - } - - fragments.push( - '
' + - '' + entry.label + '' - ); - } - - if (rowStarted) - fragments.push(''); - - if (fragments.length == 0) - return; - - var table = '' + fragments.join("") + '
'; - if (options.legend.container != null) - $(options.legend.container).html(table); - else { - var pos = "", - p = options.legend.position, - m = options.legend.margin; - if (m[0] == null) - m = [m, m]; - if (p.charAt(0) == "n") - pos += 'top:' + (m[1] + plotOffset.top) + 'px;'; - else if (p.charAt(0) == "s") - pos += 'bottom:' + (m[1] + plotOffset.bottom) + 'px;'; - if (p.charAt(1) == "e") - pos += 'right:' + (m[0] + plotOffset.right) + 'px;'; - else if (p.charAt(1) == "w") - pos += 'left:' + (m[0] + plotOffset.left) + 'px;'; - var legend = $('
' + table.replace('style="', 'style="position:absolute;' + pos +';') + '
').appendTo(placeholder); - if (options.legend.backgroundOpacity != 0.0) { - // put in the transparent background - // separately to avoid blended labels and - // label boxes - var c = options.legend.backgroundColor; - if (c == null) { - c = options.grid.backgroundColor; - if (c && typeof c == "string") - c = $.color.parse(c); - else - c = $.color.extract(legend, 'background-color'); - c.a = 1; - c = c.toString(); - } - var div = legend.children(); - $('
').prependTo(legend).css('opacity', options.legend.backgroundOpacity); - } - } - } - - - // interactive features - - var highlights = [], - redrawTimeout = null; - - // returns the data item the mouse is over, or null if none is found - function findNearbyItem(mouseX, mouseY, seriesFilter) { - var maxDistance = options.grid.mouseActiveRadius, - smallestDistance = maxDistance * maxDistance + 1, - item = null, foundPoint = false, i, j, ps; - - for (i = series.length - 1; i >= 0; --i) { - if (!seriesFilter(series[i])) - continue; - - var s = series[i], - axisx = s.xaxis, - axisy = s.yaxis, - points = s.datapoints.points, - mx = axisx.c2p(mouseX), // precompute some stuff to make the loop faster - my = axisy.c2p(mouseY), - maxx = maxDistance / axisx.scale, - maxy = maxDistance / axisy.scale; - - ps = s.datapoints.pointsize; - // with inverse transforms, we can't use the maxx/maxy - // optimization, sadly - if (axisx.options.inverseTransform) - maxx = Number.MAX_VALUE; - if (axisy.options.inverseTransform) - maxy = Number.MAX_VALUE; - - if (s.lines.show || s.points.show) { - for (j = 0; j < points.length; j += ps) { - var x = points[j], y = points[j + 1]; - if (x == null) - continue; - - // For points and lines, the cursor must be within a - // certain distance to the data point - if (x - mx > maxx || x - mx < -maxx || - y - my > maxy || y - my < -maxy) - continue; - - // We have to calculate distances in pixels, not in - // data units, because the scales of the axes may be different - var dx = Math.abs(axisx.p2c(x) - mouseX), - dy = Math.abs(axisy.p2c(y) - mouseY), - dist = dx * dx + dy * dy; // we save the sqrt - - // use <= to ensure last point takes precedence - // (last generally means on top of) - if (dist < smallestDistance) { - smallestDistance = dist; - item = [i, j / ps]; - } - } - } - - if (s.bars.show && !item) { // no other point can be nearby - - var barLeft, barRight; - - switch (s.bars.align) { - case "left": - barLeft = 0; - break; - case "right": - barLeft = -s.bars.barWidth; - break; - default: - barLeft = -s.bars.barWidth / 2; - } - - barRight = barLeft + s.bars.barWidth; - - for (j = 0; j < points.length; j += ps) { - var x = points[j], y = points[j + 1], b = points[j + 2]; - if (x == null) - continue; - - // for a bar graph, the cursor must be inside the bar - if (series[i].bars.horizontal ? 
- (mx <= Math.max(b, x) && mx >= Math.min(b, x) && - my >= y + barLeft && my <= y + barRight) : - (mx >= x + barLeft && mx <= x + barRight && - my >= Math.min(b, y) && my <= Math.max(b, y))) - item = [i, j / ps]; - } - } - } - - if (item) { - i = item[0]; - j = item[1]; - ps = series[i].datapoints.pointsize; - - return { datapoint: series[i].datapoints.points.slice(j * ps, (j + 1) * ps), - dataIndex: j, - series: series[i], - seriesIndex: i }; - } - - return null; - } - - function onMouseMove(e) { - if (options.grid.hoverable) - triggerClickHoverEvent("plothover", e, - function (s) { return s["hoverable"] != false; }); - } - - function onMouseLeave(e) { - if (options.grid.hoverable) - triggerClickHoverEvent("plothover", e, - function (s) { return false; }); - } - - function onClick(e) { - triggerClickHoverEvent("plotclick", e, - function (s) { return s["clickable"] != false; }); - } - - // trigger click or hover event (they send the same parameters - // so we share their code) - function triggerClickHoverEvent(eventname, event, seriesFilter) { - var offset = eventHolder.offset(), - canvasX = event.pageX - offset.left - plotOffset.left, - canvasY = event.pageY - offset.top - plotOffset.top, - pos = canvasToAxisCoords({ left: canvasX, top: canvasY }); - - pos.pageX = event.pageX; - pos.pageY = event.pageY; - - var item = findNearbyItem(canvasX, canvasY, seriesFilter); - - if (item) { - // fill in mouse pos for any listeners out there - item.pageX = parseInt(item.series.xaxis.p2c(item.datapoint[0]) + offset.left + plotOffset.left, 10); - item.pageY = parseInt(item.series.yaxis.p2c(item.datapoint[1]) + offset.top + plotOffset.top, 10); - } - - if (options.grid.autoHighlight) { - // clear auto-highlights - for (var i = 0; i < highlights.length; ++i) { - var h = highlights[i]; - if (h.auto == eventname && - !(item && h.series == item.series && - h.point[0] == item.datapoint[0] && - h.point[1] == item.datapoint[1])) - unhighlight(h.series, h.point); - } - - if (item) - highlight(item.series, item.datapoint, eventname); - } - - placeholder.trigger(eventname, [ pos, item ]); - } - - function triggerRedrawOverlay() { - var t = options.interaction.redrawOverlayInterval; - if (t == -1) { // skip event queue - drawOverlay(); - return; - } - - if (!redrawTimeout) - redrawTimeout = setTimeout(drawOverlay, t); - } - - function drawOverlay() { - redrawTimeout = null; - - // draw highlights - octx.save(); - overlay.clear(); - octx.translate(plotOffset.left, plotOffset.top); - - var i, hi; - for (i = 0; i < highlights.length; ++i) { - hi = highlights[i]; - - if (hi.series.bars.show) - drawBarHighlight(hi.series, hi.point); - else - drawPointHighlight(hi.series, hi.point); - } - octx.restore(); - - executeHooks(hooks.drawOverlay, [octx]); - } - - function highlight(s, point, auto) { - if (typeof s == "number") - s = series[s]; - - if (typeof point == "number") { - var ps = s.datapoints.pointsize; - point = s.datapoints.points.slice(ps * point, ps * (point + 1)); - } - - var i = indexOfHighlight(s, point); - if (i == -1) { - highlights.push({ series: s, point: point, auto: auto }); - - triggerRedrawOverlay(); - } - else if (!auto) - highlights[i].auto = false; - } - - function unhighlight(s, point) { - if (s == null && point == null) { - highlights = []; - triggerRedrawOverlay(); - return; - } - - if (typeof s == "number") - s = series[s]; - - if (typeof point == "number") { - var ps = s.datapoints.pointsize; - point = s.datapoints.points.slice(ps * point, ps * (point + 1)); - } - - var i = indexOfHighlight(s, 
point); - if (i != -1) { - highlights.splice(i, 1); - - triggerRedrawOverlay(); - } - } - - function indexOfHighlight(s, p) { - for (var i = 0; i < highlights.length; ++i) { - var h = highlights[i]; - if (h.series == s && h.point[0] == p[0] - && h.point[1] == p[1]) - return i; - } - return -1; - } - - function drawPointHighlight(series, point) { - var x = point[0], y = point[1], - axisx = series.xaxis, axisy = series.yaxis, - highlightColor = (typeof series.highlightColor === "string") ? series.highlightColor : $.color.parse(series.color).scale('a', 0.5).toString(); - - if (x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max) - return; - - var pointRadius = series.points.radius + series.points.lineWidth / 2; - octx.lineWidth = pointRadius; - octx.strokeStyle = highlightColor; - var radius = 1.5 * pointRadius; - x = axisx.p2c(x); - y = axisy.p2c(y); - - octx.beginPath(); - if (series.points.symbol == "circle") - octx.arc(x, y, radius, 0, 2 * Math.PI, false); - else - series.points.symbol(octx, x, y, radius, false); - octx.closePath(); - octx.stroke(); - } - - function drawBarHighlight(series, point) { - var highlightColor = (typeof series.highlightColor === "string") ? series.highlightColor : $.color.parse(series.color).scale('a', 0.5).toString(), - fillStyle = highlightColor, - barLeft; - - switch (series.bars.align) { - case "left": - barLeft = 0; - break; - case "right": - barLeft = -series.bars.barWidth; - break; - default: - barLeft = -series.bars.barWidth / 2; - } - - octx.lineWidth = series.bars.lineWidth; - octx.strokeStyle = highlightColor; - - drawBar(point[0], point[1], point[2] || 0, barLeft, barLeft + series.bars.barWidth, - function () { return fillStyle; }, series.xaxis, series.yaxis, octx, series.bars.horizontal, series.bars.lineWidth); - } - - function getColorOrGradient(spec, bottom, top, defaultColor) { - if (typeof spec == "string") - return spec; - else { - // assume this is a gradient spec; IE currently only - // supports a simple vertical gradient properly, so that's - // what we support too - var gradient = ctx.createLinearGradient(0, top, 0, bottom); - - for (var i = 0, l = spec.colors.length; i < l; ++i) { - var c = spec.colors[i]; - if (typeof c != "string") { - var co = $.color.parse(defaultColor); - if (c.brightness != null) - co = co.scale('rgb', c.brightness); - if (c.opacity != null) - co.a *= c.opacity; - c = co.toString(); - } - gradient.addColorStop(i / (l - 1), c); - } - - return gradient; - } - } - } - - // Add the plot function to the top level of the jQuery object - - $.plot = function(placeholder, data, options) { - //var t0 = new Date(); - var plot = new Plot($(placeholder), data, options, $.plot.plugins); - //(window.console ? console.log : alert)("time used (msecs): " + ((new Date()).getTime() - t0.getTime())); - return plot; - }; - - $.plot.version = "0.8.3"; - - $.plot.plugins = []; - - // Also add the plot function as a chainable property - - $.fn.plot = function(data, options) { - return this.each(function() { - $.plot(this, data, options); - }); - }; - - // round to nearby lower multiple of base - function floorInBase(n, base) { - return base * Math.floor(n / base); - } - -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.log.js b/src/legacy/ui/public/flot-charts/jquery.flot.log.js deleted file mode 100644 index e32bf5cf7e817..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.log.js +++ /dev/null @@ -1,163 +0,0 @@ -/* @notice - * - * Pretty handling of logarithmic axes. 
- * Copyright (c) 2007-2014 IOLA and Ole Laursen. - * Licensed under the MIT license. - * Created by Arne de Laat - * Set axis.mode to "log" and make the axis logarithmic using transform: - * axis: { - * mode: 'log', - * transform: function(v) {v <= 0 ? Math.log(v) / Math.LN10 : null}, - * inverseTransform: function(v) {Math.pow(10, v)} - * } - * The transform filters negative and zero values, because those are - * invalid on logarithmic scales. - * This plugin tries to create good looking logarithmic ticks, using - * unicode superscript characters. If all data to be plotted is between two - * powers of ten then the default flot tick generator and renderer are - * used. Logarithmic ticks are places at powers of ten and at half those - * values if there are not to many ticks already (e.g. [1, 5, 10, 50, 100]). - * For details, see https://github.com/flot/flot/pull/1328 -*/ - -(function($) { - - function log10(value) { - /* Get the Log10 of the value - */ - return Math.log(value) / Math.LN10; - } - - function floorAsLog10(value) { - /* Get power of the first power of 10 below the value - */ - return Math.floor(log10(value)); - } - - function ceilAsLog10(value) { - /* Get power of the first power of 10 above the value - */ - return Math.ceil(log10(value)); - } - - - // round to nearby lower multiple of base - function floorInBase(n, base) { - return base * Math.floor(n / base); - } - - function getUnicodePower(power) { - var superscripts = ["⁰", "¹", "²", "³", "⁴", "⁵", "⁶", "⁷", "⁸", "⁹"], - result = "", - str_power = "" + power; - for (var i = 0; i < str_power.length; i++) { - if (str_power[i] === "+") { - } - else if (str_power[i] === "-") { - result += "⁻"; - } - else { - result += superscripts[str_power[i]]; - } - } - return result; - } - - function init(plot) { - plot.hooks.processOptions.push(function (plot) { - $.each(plot.getAxes(), function(axisName, axis) { - - var opts = axis.options; - - if (opts.mode === "log") { - - axis.tickGenerator = function (axis) { - - var ticks = [], - end = ceilAsLog10(axis.max), - start = floorAsLog10(axis.min), - tick = Number.NaN, - i = 0; - - if (axis.min === null || axis.min <= 0) { - // Bad minimum, make ticks from 1 (10**0) to max - start = 0; - axis.min = 0.6; - } - - if (end <= start) { - // Start less than end?! - ticks = [1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, - 1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, - 1e7, 1e8, 1e9]; - } - else if (log10(axis.max) - log10(axis.datamin) < 1) { - // Default flot generator incase no powers of 10 - // are between start and end - var prev; - start = floorInBase(axis.min, axis.tickSize); - do { - prev = tick; - tick = start + i * axis.tickSize; - ticks.push(tick); - ++i; - } while (tick < axis.max && tick !== prev); - } - else { - // Make ticks at each power of ten - for (; i <= (end - start); i++) { - tick = Math.pow(10, start + i); - ticks.push(tick); - } - - var length = ticks.length; - - // If not to many ticks also put a tick between - // the powers of ten - if (end - start < 6) { - for (var j = 1; j < length * 2 - 1; j += 2) { - tick = ticks[j - 1] * 5; - ticks.splice(j, 0, tick); - } - } - } - return ticks; - }; - - axis.tickFormatter = function (value, axis) { - var formatted; - if (log10(axis.max) - log10(axis.datamin) < 1) { - // Default flot formatter - var factor = axis.tickDecimals ? Math.pow(10, axis.tickDecimals) : 1; - formatted = "" + Math.round(value * factor) / factor; - if (axis.tickDecimals !== null) { - var decimal = formatted.indexOf("."); - var precision = decimal === -1 ? 
0 : formatted.length - decimal - 1; - if (precision < axis.tickDecimals) { - return (precision ? formatted : formatted + ".") + ("" + factor).substr(1, axis.tickDecimals - precision); - } - } - } - else { - var multiplier = "", - exponential = parseFloat(value).toExponential(0), - power = getUnicodePower(exponential.slice(2)); - if (exponential[0] !== "1") { - multiplier = exponential[0] + "x"; - } - formatted = multiplier + "10" + power; - } - return formatted; - }; - } - }); - }); - } - - $.plot.plugins.push({ - init: init, - name: "log", - version: "0.9" - }); - -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.navigate.js b/src/legacy/ui/public/flot-charts/jquery.flot.navigate.js deleted file mode 100644 index 13fb7f17d04b2..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.navigate.js +++ /dev/null @@ -1,346 +0,0 @@ -/* Flot plugin for adding the ability to pan and zoom the plot. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -The default behaviour is double click and scrollwheel up/down to zoom in, drag -to pan. The plugin defines plot.zoom({ center }), plot.zoomOut() and -plot.pan( offset ) so you easily can add custom controls. It also fires -"plotpan" and "plotzoom" events, useful for synchronizing plots. - -The plugin supports these options: - - zoom: { - interactive: false - trigger: "dblclick" // or "click" for single click - amount: 1.5 // 2 = 200% (zoom in), 0.5 = 50% (zoom out) - } - - pan: { - interactive: false - cursor: "move" // CSS mouse cursor value used when dragging, e.g. "pointer" - frameRate: 20 - } - - xaxis, yaxis, x2axis, y2axis: { - zoomRange: null // or [ number, number ] (min range, max range) or false - panRange: null // or [ number, number ] (min, max) or false - } - -"interactive" enables the built-in drag/click behaviour. If you enable -interactive for pan, then you'll have a basic plot that supports moving -around; the same for zoom. - -"amount" specifies the default amount to zoom in (so 1.5 = 150%) relative to -the current viewport. - -"cursor" is a standard CSS mouse cursor string used for visual feedback to the -user when dragging. - -"frameRate" specifies the maximum number of times per second the plot will -update itself while the user is panning around on it (set to null to disable -intermediate pans, the plot will then not update until the mouse button is -released). - -"zoomRange" is the interval in which zooming can happen, e.g. with zoomRange: -[1, 100] the zoom will never scale the axis so that the difference between min -and max is smaller than 1 or larger than 100. You can set either end to null -to ignore, e.g. [1, null]. If you set zoomRange to false, zooming on that axis -will be disabled. - -"panRange" confines the panning to stay within a range, e.g. with panRange: -[-10, 20] panning stops at -10 in one end and at 20 in the other. Either can -be null, e.g. [-10, null]. If you set panRange to false, panning on that axis -will be disabled. - -Example API usage: - - plot = $.plot(...); - - // zoom default amount in on the pixel ( 10, 20 ) - plot.zoom({ center: { left: 10, top: 20 } }); - - // zoom out again - plot.zoomOut({ center: { left: 10, top: 20 } }); - - // zoom 200% in on the pixel (10, 20) - plot.zoom({ amount: 2, center: { left: 10, top: 20 } }); - - // pan 100 pixels to the left and 20 down - plot.pan({ left: -100, top: 20 }) - -Here, "center" specifies where the center of the zooming should happen. 
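For reference, a minimal sketch of wiring up the interactive behaviour and the "plotzoom"/"plotpan" events documented above (illustrative only; assumes a '#placeholder' element and a prepared `data` array, neither of which appears in the removed source):

    var plot = $.plot('#placeholder', data, {
        zoom: { interactive: true, amount: 1.5 },
        pan: { interactive: true, frameRate: 20 },
        xaxis: { zoomRange: [0.1, 100], panRange: [-50, 50] }
    });

    // "plotzoom" / "plotpan" fire after every change, e.g. to keep a second plot in sync
    $('#placeholder').on('plotzoom plotpan', function (event, changedPlot) {
        var xaxis = changedPlot.getAxes().xaxis;
        // apply xaxis.min / xaxis.max to the other plot's axis options and redraw it
    });
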
Note -that this is defined in pixel space, not the space of the data points (you can -use the p2c helpers on the axes in Flot to help you convert between these). - -"amount" is the amount to zoom the viewport relative to the current range, so -1 is 100% (i.e. no change), 1.5 is 150% (zoom in), 0.7 is 70% (zoom out). You -can set the default in the options. - -*/ - -// First two dependencies, jquery.event.drag.js and -// jquery.mousewheel.js, we put them inline here to save people the -// effort of downloading them. - -/* -jquery.event.drag.js ~ v1.5 ~ Copyright (c) 2008, Three Dub Media (http://threedubmedia.com) -Licensed under the MIT License ~ http://threedubmedia.googlecode.com/files/MIT-LICENSE.txt -*/ -(function(a){function e(h){var k,j=this,l=h.data||{};if(l.elem)j=h.dragTarget=l.elem,h.dragProxy=d.proxy||j,h.cursorOffsetX=l.pageX-l.left,h.cursorOffsetY=l.pageY-l.top,h.offsetX=h.pageX-h.cursorOffsetX,h.offsetY=h.pageY-h.cursorOffsetY;else if(d.dragging||l.which>0&&h.which!=l.which||a(h.target).is(l.not))return;switch(h.type){case"mousedown":return a.extend(l,a(j).offset(),{elem:j,target:h.target,pageX:h.pageX,pageY:h.pageY}),b.add(document,"mousemove mouseup",e,l),i(j,!1),d.dragging=null,!1;case!d.dragging&&"mousemove":if(g(h.pageX-l.pageX)+g(h.pageY-l.pageY) max) { - // make sure min < max - var tmp = min; - min = max; - max = tmp; - } - - //Check that we are in panRange - if (pr) { - if (pr[0] != null && min < pr[0]) { - min = pr[0]; - } - if (pr[1] != null && max > pr[1]) { - max = pr[1]; - } - } - - var range = max - min; - if (zr && - ((zr[0] != null && range < zr[0] && amount >1) || - (zr[1] != null && range > zr[1] && amount <1))) - return; - - opts.min = min; - opts.max = max; - }); - - plot.setupGrid(); - plot.draw(); - - if (!args.preventEvent) - plot.getPlaceholder().trigger("plotzoom", [ plot, args ]); - }; - - plot.pan = function (args) { - var delta = { - x: +args.left, - y: +args.top - }; - - if (isNaN(delta.x)) - delta.x = 0; - if (isNaN(delta.y)) - delta.y = 0; - - $.each(plot.getAxes(), function (_, axis) { - var opts = axis.options, - min, max, d = delta[axis.direction]; - - min = axis.c2p(axis.p2c(axis.min) + d), - max = axis.c2p(axis.p2c(axis.max) + d); - - var pr = opts.panRange; - if (pr === false) // no panning on this axis - return; - - if (pr) { - // check whether we hit the wall - if (pr[0] != null && pr[0] > min) { - d = pr[0] - min; - min += d; - max += d; - } - - if (pr[1] != null && pr[1] < max) { - d = pr[1] - max; - min += d; - max += d; - } - } - - opts.min = min; - opts.max = max; - }); - - plot.setupGrid(); - plot.draw(); - - if (!args.preventEvent) - plot.getPlaceholder().trigger("plotpan", [ plot, args ]); - }; - - function shutdown(plot, eventHolder) { - eventHolder.unbind(plot.getOptions().zoom.trigger, onZoomClick); - eventHolder.unbind("mousewheel", onMouseWheel); - eventHolder.unbind("dragstart", onDragStart); - eventHolder.unbind("drag", onDrag); - eventHolder.unbind("dragend", onDragEnd); - if (panTimeout) - clearTimeout(panTimeout); - } - - plot.hooks.bindEvents.push(bindEvents); - plot.hooks.shutdown.push(shutdown); - } - - $.plot.plugins.push({ - init: init, - options: options, - name: 'navigate', - version: '1.3' - }); -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.pie.js b/src/legacy/ui/public/flot-charts/jquery.flot.pie.js deleted file mode 100644 index 06f900bdc950f..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.pie.js +++ /dev/null @@ -1,824 +0,0 @@ -/* Flot plugin for rendering pie 
charts. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -The plugin assumes that each series has a single data value, and that each -value is a positive integer or zero. Negative numbers don't make sense for a -pie chart, and have unpredictable results. The values do NOT need to be -passed in as percentages; the plugin will calculate the total and per-slice -percentages internally. - -* Created by Brian Medendorp - -* Updated with contributions from btburnett3, Anthony Aragues and Xavi Ivars - -The plugin supports these options: - - series: { - pie: { - show: true/false - radius: 0-1 for percentage of fullsize, or a specified pixel length, or 'auto' - innerRadius: 0-1 for percentage of fullsize or a specified pixel length, for creating a donut effect - startAngle: 0-2 factor of PI used for starting angle (in radians) i.e 3/2 starts at the top, 0 and 2 have the same result - tilt: 0-1 for percentage to tilt the pie, where 1 is no tilt, and 0 is completely flat (nothing will show) - offset: { - top: integer value to move the pie up or down - left: integer value to move the pie left or right, or 'auto' - }, - stroke: { - color: any hexadecimal color value (other formats may or may not work, so best to stick with something like '#FFF') - width: integer pixel width of the stroke - }, - label: { - show: true/false, or 'auto' - formatter: a user-defined function that modifies the text/style of the label text - radius: 0-1 for percentage of fullsize, or a specified pixel length - background: { - color: any hexadecimal color value (other formats may or may not work, so best to stick with something like '#000') - opacity: 0-1 - }, - threshold: 0-1 for the percentage value at which to hide labels (if they're too small) - }, - combine: { - threshold: 0-1 for the percentage value at which to combine slices (if they're too small) - color: any hexadecimal color value (other formats may or may not work, so best to stick with something like '#CCC'), if null, the plugin will automatically use the color of the first slice to be combined - label: any text value of what the combined slice should be labeled - } - highlight: { - opacity: 0-1 - } - } - } - -More detail and specific examples can be found in the included HTML file. 
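As a quick reference for the options listed above, a minimal donut configuration might look like the following (illustrative sketch only; '#placeholder' and `data` are assumed to exist and are not part of the removed source):

    $.plot('#placeholder', data, {
        series: {
            pie: {
                show: true,
                radius: 3 / 4,        // fraction of the available size (values > 1 are pixels)
                innerRadius: 0.4,     // > 0 cuts a hole, producing a donut
                label: { show: true, radius: 1, threshold: 0.05 },
                combine: { threshold: 0.05, label: 'Other' },
                stroke: { color: '#FFF', width: 1 }
            }
        },
        legend: { show: false }
    });
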
- -*/ - -import { i18n } from '@kbn/i18n'; - -(function($) { - // Maximum redraw attempts when fitting labels within the plot - - var REDRAW_ATTEMPTS = 10; - - // Factor by which to shrink the pie when fitting labels within the plot - - var REDRAW_SHRINK = 0.95; - - function init(plot) { - - var canvas = null, - target = null, - options = null, - maxRadius = null, - centerLeft = null, - centerTop = null, - processed = false, - ctx = null; - - // interactive variables - - var highlights = []; - - // add hook to determine if pie plugin in enabled, and then perform necessary operations - - plot.hooks.processOptions.push(function(plot, options) { - if (options.series.pie.show) { - - options.grid.show = false; - - // set labels.show - - if (options.series.pie.label.show == "auto") { - if (options.legend.show) { - options.series.pie.label.show = false; - } else { - options.series.pie.label.show = true; - } - } - - // set radius - - if (options.series.pie.radius == "auto") { - if (options.series.pie.label.show) { - options.series.pie.radius = 3/4; - } else { - options.series.pie.radius = 1; - } - } - - // ensure sane tilt - - if (options.series.pie.tilt > 1) { - options.series.pie.tilt = 1; - } else if (options.series.pie.tilt < 0) { - options.series.pie.tilt = 0; - } - } - }); - - plot.hooks.bindEvents.push(function(plot, eventHolder) { - var options = plot.getOptions(); - if (options.series.pie.show) { - if (options.grid.hoverable) { - eventHolder.unbind("mousemove").mousemove(onMouseMove); - } - if (options.grid.clickable) { - eventHolder.unbind("click").click(onClick); - } - } - }); - - plot.hooks.processDatapoints.push(function(plot, series, data, datapoints) { - var options = plot.getOptions(); - if (options.series.pie.show) { - processDatapoints(plot, series, data, datapoints); - } - }); - - plot.hooks.drawOverlay.push(function(plot, octx) { - var options = plot.getOptions(); - if (options.series.pie.show) { - drawOverlay(plot, octx); - } - }); - - plot.hooks.draw.push(function(plot, newCtx) { - var options = plot.getOptions(); - if (options.series.pie.show) { - draw(plot, newCtx); - } - }); - - function processDatapoints(plot, series, datapoints) { - if (!processed) { - processed = true; - canvas = plot.getCanvas(); - target = $(canvas).parent(); - options = plot.getOptions(); - plot.setData(combine(plot.getData())); - } - } - - function combine(data) { - - var total = 0, - combined = 0, - numCombined = 0, - color = options.series.pie.combine.color, - newdata = []; - - // Fix up the raw data from Flot, ensuring the data is numeric - - for (var i = 0; i < data.length; ++i) { - - var value = data[i].data; - - // If the data is an array, we'll assume that it's a standard - // Flot x-y pair, and are concerned only with the second value. - - // Note how we use the original array, rather than creating a - // new one; this is more efficient and preserves any extra data - // that the user may have stored in higher indexes. 
- - if ($.isArray(value) && value.length == 1) { - value = value[0]; - } - - if ($.isArray(value)) { - // Equivalent to $.isNumeric() but compatible with jQuery < 1.7 - if (!isNaN(parseFloat(value[1])) && isFinite(value[1])) { - value[1] = +value[1]; - } else { - value[1] = 0; - } - } else if (!isNaN(parseFloat(value)) && isFinite(value)) { - value = [1, +value]; - } else { - value = [1, 0]; - } - - data[i].data = [value]; - } - - // Sum up all the slices, so we can calculate percentages for each - - for (var i = 0; i < data.length; ++i) { - total += data[i].data[0][1]; - } - - // Count the number of slices with percentages below the combine - // threshold; if it turns out to be just one, we won't combine. - - for (var i = 0; i < data.length; ++i) { - var value = data[i].data[0][1]; - if (value / total <= options.series.pie.combine.threshold) { - combined += value; - numCombined++; - if (!color) { - color = data[i].color; - } - } - } - - for (var i = 0; i < data.length; ++i) { - var value = data[i].data[0][1]; - if (numCombined < 2 || value / total > options.series.pie.combine.threshold) { - newdata.push( - $.extend(data[i], { /* extend to allow keeping all other original data values - and using them e.g. in labelFormatter. */ - data: [[1, value]], - color: data[i].color, - label: data[i].label, - angle: value * Math.PI * 2 / total, - percent: value / (total / 100) - }) - ); - } - } - - if (numCombined > 1) { - newdata.push({ - data: [[1, combined]], - color: color, - label: options.series.pie.combine.label, - angle: combined * Math.PI * 2 / total, - percent: combined / (total / 100) - }); - } - - return newdata; - } - - function draw(plot, newCtx) { - - if (!target) { - return; // if no series were passed - } - - var canvasWidth = plot.getPlaceholder().width(), - canvasHeight = plot.getPlaceholder().height(), - legendWidth = target.children().filter(".legend").children().width() || 0; - - ctx = newCtx; - - // WARNING: HACK! REWRITE THIS CODE AS SOON AS POSSIBLE! - - // When combining smaller slices into an 'other' slice, we need to - // add a new series. Since Flot gives plugins no way to modify the - // list of series, the pie plugin uses a hack where the first call - // to processDatapoints results in a call to setData with the new - // list of series, then subsequent processDatapoints do nothing. - - // The plugin-global 'processed' flag is used to control this hack; - // it starts out false, and is set to true after the first call to - // processDatapoints. - - // Unfortunately this turns future setData calls into no-ops; they - // call processDatapoints, the flag is true, and nothing happens. - - // To fix this we'll set the flag back to false here in draw, when - // all series have been processed, so the next sequence of calls to - // processDatapoints once again starts out with a slice-combine. - // This is really a hack; in 0.9 we need to give plugins a proper - // way to modify series before any processing begins. 
- - processed = false; - - // calculate maximum radius and center point - - maxRadius = Math.min(canvasWidth, canvasHeight / options.series.pie.tilt) / 2; - centerTop = canvasHeight / 2 + options.series.pie.offset.top; - centerLeft = canvasWidth / 2; - - if (options.series.pie.offset.left == "auto") { - if (options.legend.position.match("w")) { - centerLeft += legendWidth / 2; - } else { - centerLeft -= legendWidth / 2; - } - if (centerLeft < maxRadius) { - centerLeft = maxRadius; - } else if (centerLeft > canvasWidth - maxRadius) { - centerLeft = canvasWidth - maxRadius; - } - } else { - centerLeft += options.series.pie.offset.left; - } - - var slices = plot.getData(), - attempts = 0; - - // Keep shrinking the pie's radius until drawPie returns true, - // indicating that all the labels fit, or we try too many times. - - do { - if (attempts > 0) { - maxRadius *= REDRAW_SHRINK; - } - attempts += 1; - clear(); - if (options.series.pie.tilt <= 0.8) { - drawShadow(); - } - } while (!drawPie() && attempts < REDRAW_ATTEMPTS) - - if (attempts >= REDRAW_ATTEMPTS) { - clear(); - const errorMessage = i18n.translate('common.ui.flotCharts.pie.unableToDrawLabelsInsideCanvasErrorMessage', { - defaultMessage: 'Could not draw pie with labels contained inside canvas', - }); - target.prepend(`
${errorMessage}
`); - } - - if (plot.setSeries && plot.insertLegend) { - plot.setSeries(slices); - plot.insertLegend(); - } - - // we're actually done at this point, just defining internal functions at this point - - function clear() { - ctx.clearRect(0, 0, canvasWidth, canvasHeight); - target.children().filter(".pieLabel, .pieLabelBackground").remove(); - } - - function drawShadow() { - - var shadowLeft = options.series.pie.shadow.left; - var shadowTop = options.series.pie.shadow.top; - var edge = 10; - var alpha = options.series.pie.shadow.alpha; - var radius = options.series.pie.radius > 1 ? options.series.pie.radius : maxRadius * options.series.pie.radius; - - if (radius >= canvasWidth / 2 - shadowLeft || radius * options.series.pie.tilt >= canvasHeight / 2 - shadowTop || radius <= edge) { - return; // shadow would be outside canvas, so don't draw it - } - - ctx.save(); - ctx.translate(shadowLeft,shadowTop); - ctx.globalAlpha = alpha; - ctx.fillStyle = "#000"; - - // center and rotate to starting position - - ctx.translate(centerLeft,centerTop); - ctx.scale(1, options.series.pie.tilt); - - //radius -= edge; - - for (var i = 1; i <= edge; i++) { - ctx.beginPath(); - ctx.arc(0, 0, radius, 0, Math.PI * 2, false); - ctx.fill(); - radius -= i; - } - - ctx.restore(); - } - - function drawPie() { - - var startAngle = Math.PI * options.series.pie.startAngle; - var radius = options.series.pie.radius > 1 ? options.series.pie.radius : maxRadius * options.series.pie.radius; - - // center and rotate to starting position - - ctx.save(); - ctx.translate(centerLeft,centerTop); - ctx.scale(1, options.series.pie.tilt); - //ctx.rotate(startAngle); // start at top; -- This doesn't work properly in Opera - - // draw slices - - ctx.save(); - var currentAngle = startAngle; - for (var i = 0; i < slices.length; ++i) { - slices[i].startAngle = currentAngle; - drawSlice(slices[i].angle, slices[i].color, true); - } - ctx.restore(); - - // draw slice outlines - - if (options.series.pie.stroke.width > 0) { - ctx.save(); - ctx.lineWidth = options.series.pie.stroke.width; - currentAngle = startAngle; - for (var i = 0; i < slices.length; ++i) { - drawSlice(slices[i].angle, options.series.pie.stroke.color, false); - } - ctx.restore(); - } - - // draw donut hole - - drawDonutHole(ctx); - - ctx.restore(); - - // Draw the labels, returning true if they fit within the plot - - if (options.series.pie.label.show) { - return drawLabels(); - } else return true; - - function drawSlice(angle, color, fill) { - - if (angle <= 0 || isNaN(angle)) { - return; - } - - if (fill) { - ctx.fillStyle = color; - } else { - ctx.strokeStyle = color; - ctx.lineJoin = "round"; - } - - ctx.beginPath(); - if (Math.abs(angle - Math.PI * 2) > 0.000000001) { - ctx.moveTo(0, 0); // Center of the pie - } - - //ctx.arc(0, 0, radius, 0, angle, false); // This doesn't work properly in Opera - ctx.arc(0, 0, radius,currentAngle, currentAngle + angle / 2, false); - ctx.arc(0, 0, radius,currentAngle + angle / 2, currentAngle + angle, false); - ctx.closePath(); - //ctx.rotate(angle); // This doesn't work properly in Opera - currentAngle += angle; - - if (fill) { - ctx.fill(); - } else { - ctx.stroke(); - } - } - - function drawLabels() { - - var currentAngle = startAngle; - var radius = options.series.pie.label.radius > 1 ? 
options.series.pie.label.radius : maxRadius * options.series.pie.label.radius; - - for (var i = 0; i < slices.length; ++i) { - if (slices[i].percent >= options.series.pie.label.threshold * 100) { - if (!drawLabel(slices[i], currentAngle, i)) { - return false; - } - } - currentAngle += slices[i].angle; - } - - return true; - - function drawLabel(slice, startAngle, index) { - - if (slice.data[0][1] == 0) { - return true; - } - - // format label text - - var lf = options.legend.labelFormatter, text, plf = options.series.pie.label.formatter; - - if (lf) { - text = lf(slice.label, slice); - } else { - text = slice.label; - } - - if (plf) { - text = plf(text, slice); - } - - var halfAngle = ((startAngle + slice.angle) + startAngle) / 2; - var x = centerLeft + Math.round(Math.cos(halfAngle) * radius); - var y = centerTop + Math.round(Math.sin(halfAngle) * radius) * options.series.pie.tilt; - - var html = "" + text + ""; - target.append(html); - - var label = target.children("#pieLabel" + index); - var labelTop = (y - label.height() / 2); - var labelLeft = (x - label.width() / 2); - - label.css("top", labelTop); - label.css("left", labelLeft); - - // check to make sure that the label is not outside the canvas - - if (0 - labelTop > 0 || 0 - labelLeft > 0 || canvasHeight - (labelTop + label.height()) < 0 || canvasWidth - (labelLeft + label.width()) < 0) { - return false; - } - - if (options.series.pie.label.background.opacity != 0) { - - // put in the transparent background separately to avoid blended labels and label boxes - - var c = options.series.pie.label.background.color; - - if (c == null) { - c = slice.color; - } - - var pos = "top:" + labelTop + "px;left:" + labelLeft + "px;"; - $("
") - .css("opacity", options.series.pie.label.background.opacity) - .insertBefore(label); - } - - return true; - } // end individual label function - } // end drawLabels function - } // end drawPie function - } // end draw function - - // Placed here because it needs to be accessed from multiple locations - - function drawDonutHole(layer) { - if (options.series.pie.innerRadius > 0) { - - // subtract the center - - layer.save(); - var innerRadius = options.series.pie.innerRadius > 1 ? options.series.pie.innerRadius : maxRadius * options.series.pie.innerRadius; - layer.globalCompositeOperation = "destination-out"; // this does not work with excanvas, but it will fall back to using the stroke color - layer.beginPath(); - layer.fillStyle = options.series.pie.stroke.color; - layer.arc(0, 0, innerRadius, 0, Math.PI * 2, false); - layer.fill(); - layer.closePath(); - layer.restore(); - - // add inner stroke - - layer.save(); - layer.beginPath(); - layer.strokeStyle = options.series.pie.stroke.color; - layer.arc(0, 0, innerRadius, 0, Math.PI * 2, false); - layer.stroke(); - layer.closePath(); - layer.restore(); - - // TODO: add extra shadow inside hole (with a mask) if the pie is tilted. - } - } - - //-- Additional Interactive related functions -- - - function isPointInPoly(poly, pt) { - for(var c = false, i = -1, l = poly.length, j = l - 1; ++i < l; j = i) - ((poly[i][1] <= pt[1] && pt[1] < poly[j][1]) || (poly[j][1] <= pt[1] && pt[1]< poly[i][1])) - && (pt[0] < (poly[j][0] - poly[i][0]) * (pt[1] - poly[i][1]) / (poly[j][1] - poly[i][1]) + poly[i][0]) - && (c = !c); - return c; - } - - function findNearbySlice(mouseX, mouseY) { - - var slices = plot.getData(), - options = plot.getOptions(), - radius = options.series.pie.radius > 1 ? options.series.pie.radius : maxRadius * options.series.pie.radius, - x, y; - - for (var i = 0; i < slices.length; ++i) { - - var s = slices[i]; - - if (s.pie.show) { - - ctx.save(); - ctx.beginPath(); - ctx.moveTo(0, 0); // Center of the pie - //ctx.scale(1, options.series.pie.tilt); // this actually seems to break everything when here. - ctx.arc(0, 0, radius, s.startAngle, s.startAngle + s.angle / 2, false); - ctx.arc(0, 0, radius, s.startAngle + s.angle / 2, s.startAngle + s.angle, false); - ctx.closePath(); - x = mouseX - centerLeft; - y = mouseY - centerTop; - - if (ctx.isPointInPath) { - if (ctx.isPointInPath(mouseX - centerLeft, mouseY - centerTop)) { - ctx.restore(); - return { - datapoint: [s.percent, s.data], - dataIndex: 0, - series: s, - seriesIndex: i - }; - } - } else { - - // excanvas for IE doesn;t support isPointInPath, this is a workaround. - - var p1X = radius * Math.cos(s.startAngle), - p1Y = radius * Math.sin(s.startAngle), - p2X = radius * Math.cos(s.startAngle + s.angle / 4), - p2Y = radius * Math.sin(s.startAngle + s.angle / 4), - p3X = radius * Math.cos(s.startAngle + s.angle / 2), - p3Y = radius * Math.sin(s.startAngle + s.angle / 2), - p4X = radius * Math.cos(s.startAngle + s.angle / 1.5), - p4Y = radius * Math.sin(s.startAngle + s.angle / 1.5), - p5X = radius * Math.cos(s.startAngle + s.angle), - p5Y = radius * Math.sin(s.startAngle + s.angle), - arrPoly = [[0, 0], [p1X, p1Y], [p2X, p2Y], [p3X, p3Y], [p4X, p4Y], [p5X, p5Y]], - arrPoint = [x, y]; - - // TODO: perhaps do some mathematical trickery here with the Y-coordinate to compensate for pie tilt? 
- - if (isPointInPoly(arrPoly, arrPoint)) { - ctx.restore(); - return { - datapoint: [s.percent, s.data], - dataIndex: 0, - series: s, - seriesIndex: i - }; - } - } - - ctx.restore(); - } - } - - return null; - } - - function onMouseMove(e) { - triggerClickHoverEvent("plothover", e); - } - - function onClick(e) { - triggerClickHoverEvent("plotclick", e); - } - - // trigger click or hover event (they send the same parameters so we share their code) - - function triggerClickHoverEvent(eventname, e) { - - var offset = plot.offset(); - var canvasX = parseInt(e.pageX - offset.left); - var canvasY = parseInt(e.pageY - offset.top); - var item = findNearbySlice(canvasX, canvasY); - - if (options.grid.autoHighlight) { - - // clear auto-highlights - - for (var i = 0; i < highlights.length; ++i) { - var h = highlights[i]; - if (h.auto == eventname && !(item && h.series == item.series)) { - unhighlight(h.series); - } - } - } - - // highlight the slice - - if (item) { - highlight(item.series, eventname); - } - - // trigger any hover bind events - - var pos = { pageX: e.pageX, pageY: e.pageY }; - target.trigger(eventname, [pos, item]); - } - - function highlight(s, auto) { - //if (typeof s == "number") { - // s = series[s]; - //} - - var i = indexOfHighlight(s); - - if (i == -1) { - highlights.push({ series: s, auto: auto }); - plot.triggerRedrawOverlay(); - } else if (!auto) { - highlights[i].auto = false; - } - } - - function unhighlight(s) { - if (s == null) { - highlights = []; - plot.triggerRedrawOverlay(); - } - - //if (typeof s == "number") { - // s = series[s]; - //} - - var i = indexOfHighlight(s); - - if (i != -1) { - highlights.splice(i, 1); - plot.triggerRedrawOverlay(); - } - } - - function indexOfHighlight(s) { - for (var i = 0; i < highlights.length; ++i) { - var h = highlights[i]; - if (h.series == s) - return i; - } - return -1; - } - - function drawOverlay(plot, octx) { - - var options = plot.getOptions(); - - var radius = options.series.pie.radius > 1 ? 
options.series.pie.radius : maxRadius * options.series.pie.radius; - - octx.save(); - octx.translate(centerLeft, centerTop); - octx.scale(1, options.series.pie.tilt); - - for (var i = 0; i < highlights.length; ++i) { - drawHighlight(highlights[i].series); - } - - drawDonutHole(octx); - - octx.restore(); - - function drawHighlight(series) { - - if (series.angle <= 0 || isNaN(series.angle)) { - return; - } - - //octx.fillStyle = parseColor(options.series.pie.highlight.color).scale(null, null, null, options.series.pie.highlight.opacity).toString(); - octx.fillStyle = "rgba(255, 255, 255, " + options.series.pie.highlight.opacity + ")"; // this is temporary until we have access to parseColor - octx.beginPath(); - if (Math.abs(series.angle - Math.PI * 2) > 0.000000001) { - octx.moveTo(0, 0); // Center of the pie - } - octx.arc(0, 0, radius, series.startAngle, series.startAngle + series.angle / 2, false); - octx.arc(0, 0, radius, series.startAngle + series.angle / 2, series.startAngle + series.angle, false); - octx.closePath(); - octx.fill(); - } - } - } // end init (plugin body) - - // define pie specific options and their default values - - var options = { - series: { - pie: { - show: false, - radius: "auto", // actual radius of the visible pie (based on full calculated radius if <=1, or hard pixel value) - innerRadius: 0, /* for donut */ - startAngle: 3/2, - tilt: 1, - shadow: { - left: 5, // shadow left offset - top: 15, // shadow top offset - alpha: 0.02 // shadow alpha - }, - offset: { - top: 0, - left: "auto" - }, - stroke: { - color: "#fff", - width: 1 - }, - label: { - show: "auto", - formatter: function(label, slice) { - return "
" + label + "
" + Math.round(slice.percent) + "%
"; - }, // formatter function - radius: 1, // radius at which to place the labels (based on full calculated radius if <=1, or hard pixel value) - background: { - color: null, - opacity: 0 - }, - threshold: 0 // percentage at which to hide the label (i.e. the slice is too narrow) - }, - combine: { - threshold: -1, // percentage at which to combine little slices into one larger slice - color: null, // color to give the new slice (auto-generated if null) - label: "Other" // label to give the new slice - }, - highlight: { - //color: "#fff", // will add this functionality once parseColor is available - opacity: 0.5 - } - } - } - }; - - $.plot.plugins.push({ - init: init, - options: options, - name: "pie", - version: "1.1" - }); - -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.resize.js b/src/legacy/ui/public/flot-charts/jquery.flot.resize.js deleted file mode 100644 index 8a626dda0addb..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.resize.js +++ /dev/null @@ -1,59 +0,0 @@ -/* Flot plugin for automatically redrawing plots as the placeholder resizes. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -It works by listening for changes on the placeholder div (through the jQuery -resize event plugin) - if the size changes, it will redraw the plot. - -There are no options. If you need to disable the plugin for some plots, you -can just fix the size of their placeholders. - -*/ - -/* Inline dependency: - * jQuery resize event - v1.1 - 3/14/2010 - * http://benalman.com/projects/jquery-resize-plugin/ - * - * Copyright (c) 2010 "Cowboy" Ben Alman - * Dual licensed under the MIT and GPL licenses. - * http://benalman.com/about/license/ - */ -(function($,e,t){"$:nomunge";var i=[],n=$.resize=$.extend($.resize,{}),a,r=false,s="setTimeout",u="resize",m=u+"-special-event",o="pendingDelay",l="activeDelay",f="throttleWindow";n[o]=200;n[l]=20;n[f]=true;$.event.special[u]={setup:function(){if(!n[f]&&this[s]){return false}var e=$(this);i.push(this);e.data(m,{w:e.width(),h:e.height()});if(i.length===1){a=t;h()}},teardown:function(){if(!n[f]&&this[s]){return false}var e=$(this);for(var t=i.length-1;t>=0;t--){if(i[t]==this){i.splice(t,1);break}}e.removeData(m);if(!i.length){if(r){cancelAnimationFrame(a)}else{clearTimeout(a)}a=null}},add:function(e){if(!n[f]&&this[s]){return false}var i;function a(e,n,a){var r=$(this),s=r.data(m)||{};s.w=n!==t?n:r.width();s.h=a!==t?a:r.height();i.apply(this,arguments)}if($.isFunction(e)){i=e;return a}else{i=e.handler;e.handler=a}}};function h(t){if(r===true){r=t||1}for(var s=i.length-1;s>=0;s--){var l=$(i[s]);if(l[0]==e||l.is(":visible")){var f=l.width(),c=l.height(),d=l.data(m);if(d&&(f!==d.w||c!==d.h)){l.trigger(u,[d.w=f,d.h=c]);r=t||true}}else{d=l.data(m);d.w=0;d.h=0}}if(a!==null){if(r&&(t==null||t-r<1e3)){a=e.requestAnimationFrame(h)}else{a=setTimeout(h,n[o]);r=false}}}if(!e.requestAnimationFrame){e.requestAnimationFrame=function(){return e.webkitRequestAnimationFrame||e.mozRequestAnimationFrame||e.oRequestAnimationFrame||e.msRequestAnimationFrame||function(t,i){return e.setTimeout(function(){t((new Date).getTime())},n[l])}}()}if(!e.cancelAnimationFrame){e.cancelAnimationFrame=function(){return e.webkitCancelRequestAnimationFrame||e.mozCancelRequestAnimationFrame||e.oCancelRequestAnimationFrame||e.msCancelRequestAnimationFrame||clearTimeout}()}})(jQuery,this); - -(function ($) { - var options = { }; // no options - - function init(plot) { - function onResize() { - var placeholder = plot.getPlaceholder(); - 
- // somebody might have hidden us and we can't plot - // when we don't have the dimensions - if (placeholder.width() == 0 || placeholder.height() == 0) - return; - - plot.resize(); - plot.setupGrid(); - plot.draw(); - } - - function bindEvents(plot, eventHolder) { - plot.getPlaceholder().resize(onResize); - } - - function shutdown(plot, eventHolder) { - plot.getPlaceholder().unbind("resize", onResize); - } - - plot.hooks.bindEvents.push(bindEvents); - plot.hooks.shutdown.push(shutdown); - } - - $.plot.plugins.push({ - init: init, - options: options, - name: 'resize', - version: '1.0' - }); -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.selection.js b/src/legacy/ui/public/flot-charts/jquery.flot.selection.js deleted file mode 100644 index c8707b30f4e6f..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.selection.js +++ /dev/null @@ -1,360 +0,0 @@ -/* Flot plugin for selecting regions of a plot. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -The plugin supports these options: - -selection: { - mode: null or "x" or "y" or "xy", - color: color, - shape: "round" or "miter" or "bevel", - minSize: number of pixels -} - -Selection support is enabled by setting the mode to one of "x", "y" or "xy". -In "x" mode, the user will only be able to specify the x range, similarly for -"y" mode. For "xy", the selection becomes a rectangle where both ranges can be -specified. "color" is color of the selection (if you need to change the color -later on, you can get to it with plot.getOptions().selection.color). "shape" -is the shape of the corners of the selection. - -"minSize" is the minimum size a selection can be in pixels. This value can -be customized to determine the smallest size a selection can be and still -have the selection rectangle be displayed. When customizing this value, the -fact that it refers to pixels, not axis units must be taken into account. -Thus, for example, if there is a bar graph in time mode with BarWidth set to 1 -minute, setting "minSize" to 1 will not make the minimum selection size 1 -minute, but rather 1 pixel. Note also that setting "minSize" to 0 will prevent -"plotunselected" events from being fired when the user clicks the mouse without -dragging. - -When selection support is enabled, a "plotselected" event will be emitted on -the DOM element you passed into the plot function. The event handler gets a -parameter with the ranges selected on the axes, like this: - - placeholder.bind( "plotselected", function( event, ranges ) { - alert("You selected " + ranges.xaxis.from + " to " + ranges.xaxis.to) - // similar for yaxis - with multiple axes, the extra ones are in - // x2axis, x3axis, ... - }); - -The "plotselected" event is only fired when the user has finished making the -selection. A "plotselecting" event is fired during the process with the same -parameters as the "plotselected" event, in case you want to know what's -happening while it's happening, - -A "plotunselected" event with no arguments is emitted when the user clicks the -mouse to remove the selection. As stated above, setting "minSize" to 0 will -destroy this behavior. - -The plugin also adds the following methods to the plot object: - -- setSelection( ranges, preventEvent ) - - Set the selection rectangle. The passed in ranges is on the same form as - returned in the "plotselected" event. 
If the selection mode is "x", you - should put in either an xaxis range, if the mode is "y" you need to put in - an yaxis range and both xaxis and yaxis if the selection mode is "xy", like - this: - - setSelection({ xaxis: { from: 0, to: 10 }, yaxis: { from: 40, to: 60 } }); - - setSelection will trigger the "plotselected" event when called. If you don't - want that to happen, e.g. if you're inside a "plotselected" handler, pass - true as the second parameter. If you are using multiple axes, you can - specify the ranges on any of those, e.g. as x2axis/x3axis/... instead of - xaxis, the plugin picks the first one it sees. - -- clearSelection( preventEvent ) - - Clear the selection rectangle. Pass in true to avoid getting a - "plotunselected" event. - -- getSelection() - - Returns the current selection in the same format as the "plotselected" - event. If there's currently no selection, the function returns null. - -*/ - -(function ($) { - function init(plot) { - var selection = { - first: { x: -1, y: -1}, second: { x: -1, y: -1}, - show: false, - active: false - }; - - // FIXME: The drag handling implemented here should be - // abstracted out, there's some similar code from a library in - // the navigation plugin, this should be massaged a bit to fit - // the Flot cases here better and reused. Doing this would - // make this plugin much slimmer. - var savedhandlers = {}; - - var mouseUpHandler = null; - - function onMouseMove(e) { - if (selection.active) { - updateSelection(e); - - plot.getPlaceholder().trigger("plotselecting", [ getSelection() ]); - } - } - - function onMouseDown(e) { - if (e.which != 1) // only accept left-click - return; - - // cancel out any text selections - document.body.focus(); - - // prevent text selection and drag in old-school browsers - if (document.onselectstart !== undefined && savedhandlers.onselectstart == null) { - savedhandlers.onselectstart = document.onselectstart; - document.onselectstart = function () { return false; }; - } - if (document.ondrag !== undefined && savedhandlers.ondrag == null) { - savedhandlers.ondrag = document.ondrag; - document.ondrag = function () { return false; }; - } - - setSelectionPos(selection.first, e); - - selection.active = true; - - // this is a bit silly, but we have to use a closure to be - // able to whack the same handler again - mouseUpHandler = function (e) { onMouseUp(e); }; - - $(document).one("mouseup", mouseUpHandler); - } - - function onMouseUp(e) { - mouseUpHandler = null; - - // revert drag stuff for old-school browsers - if (document.onselectstart !== undefined) - document.onselectstart = savedhandlers.onselectstart; - if (document.ondrag !== undefined) - document.ondrag = savedhandlers.ondrag; - - // no more dragging - selection.active = false; - updateSelection(e); - - if (selectionIsSane()) - triggerSelectedEvent(); - else { - // this counts as a clear - plot.getPlaceholder().trigger("plotunselected", [ ]); - plot.getPlaceholder().trigger("plotselecting", [ null ]); - } - - return false; - } - - function getSelection() { - if (!selectionIsSane()) - return null; - - if (!selection.show) return null; - - var r = {}, c1 = selection.first, c2 = selection.second; - $.each(plot.getAxes(), function (name, axis) { - if (axis.used) { - var p1 = axis.c2p(c1[axis.direction]), p2 = axis.c2p(c2[axis.direction]); - r[name] = { from: Math.min(p1, p2), to: Math.max(p1, p2) }; - } - }); - return r; - } - - function triggerSelectedEvent() { - var r = getSelection(); - - plot.getPlaceholder().trigger("plotselected", [ r ]); - 
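For orientation while reading the implementation below, here is a minimal usage sketch of the selection API documented above; the placeholder element id, data series, and ranges are hypothetical, not taken from this file.

```js
// Enable x-range selection and react to the documented events.
var plot = $.plot($('#placeholder'), data, {
  selection: { mode: 'x', minSize: 5 }
});

$('#placeholder').bind('plotselected', function (event, ranges) {
  console.log('selected from', ranges.xaxis.from, 'to', ranges.xaxis.to);
});

$('#placeholder').bind('plotunselected', function () {
  console.log('selection cleared');
});

// Programmatic selection; pass true as the second argument to avoid
// re-triggering "plotselected" (useful inside a "plotselected" handler).
plot.setSelection({ xaxis: { from: 0, to: 10 } }, true);
```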
- // backwards-compat stuff, to be removed in future - if (r.xaxis && r.yaxis) - plot.getPlaceholder().trigger("selected", [ { x1: r.xaxis.from, y1: r.yaxis.from, x2: r.xaxis.to, y2: r.yaxis.to } ]); - } - - function clamp(min, value, max) { - return value < min ? min: (value > max ? max: value); - } - - function setSelectionPos(pos, e) { - var o = plot.getOptions(); - var offset = plot.getPlaceholder().offset(); - var plotOffset = plot.getPlotOffset(); - pos.x = clamp(0, e.pageX - offset.left - plotOffset.left, plot.width()); - pos.y = clamp(0, e.pageY - offset.top - plotOffset.top, plot.height()); - - if (o.selection.mode == "y") - pos.x = pos == selection.first ? 0 : plot.width(); - - if (o.selection.mode == "x") - pos.y = pos == selection.first ? 0 : plot.height(); - } - - function updateSelection(pos) { - if (pos.pageX == null) - return; - - setSelectionPos(selection.second, pos); - if (selectionIsSane()) { - selection.show = true; - plot.triggerRedrawOverlay(); - } - else - clearSelection(true); - } - - function clearSelection(preventEvent) { - if (selection.show) { - selection.show = false; - plot.triggerRedrawOverlay(); - if (!preventEvent) - plot.getPlaceholder().trigger("plotunselected", [ ]); - } - } - - // function taken from markings support in Flot - function extractRange(ranges, coord) { - var axis, from, to, key, axes = plot.getAxes(); - - for (var k in axes) { - axis = axes[k]; - if (axis.direction == coord) { - key = coord + axis.n + "axis"; - if (!ranges[key] && axis.n == 1) - key = coord + "axis"; // support x1axis as xaxis - if (ranges[key]) { - from = ranges[key].from; - to = ranges[key].to; - break; - } - } - } - - // backwards-compat stuff - to be removed in future - if (!ranges[key]) { - axis = coord == "x" ? plot.getXAxes()[0] : plot.getYAxes()[0]; - from = ranges[coord + "1"]; - to = ranges[coord + "2"]; - } - - // auto-reverse as an added bonus - if (from != null && to != null && from > to) { - var tmp = from; - from = to; - to = tmp; - } - - return { from: from, to: to, axis: axis }; - } - - function setSelection(ranges, preventEvent) { - var axis, range, o = plot.getOptions(); - - if (o.selection.mode == "y") { - selection.first.x = 0; - selection.second.x = plot.width(); - } - else { - range = extractRange(ranges, "x"); - - selection.first.x = range.axis.p2c(range.from); - selection.second.x = range.axis.p2c(range.to); - } - - if (o.selection.mode == "x") { - selection.first.y = 0; - selection.second.y = plot.height(); - } - else { - range = extractRange(ranges, "y"); - - selection.first.y = range.axis.p2c(range.from); - selection.second.y = range.axis.p2c(range.to); - } - - selection.show = true; - plot.triggerRedrawOverlay(); - if (!preventEvent && selectionIsSane()) - triggerSelectedEvent(); - } - - function selectionIsSane() { - var minSize = plot.getOptions().selection.minSize; - return Math.abs(selection.second.x - selection.first.x) >= minSize && - Math.abs(selection.second.y - selection.first.y) >= minSize; - } - - plot.clearSelection = clearSelection; - plot.setSelection = setSelection; - plot.getSelection = getSelection; - - plot.hooks.bindEvents.push(function(plot, eventHolder) { - var o = plot.getOptions(); - if (o.selection.mode != null) { - eventHolder.mousemove(onMouseMove); - eventHolder.mousedown(onMouseDown); - } - }); - - - plot.hooks.drawOverlay.push(function (plot, ctx) { - // draw selection - if (selection.show && selectionIsSane()) { - var plotOffset = plot.getPlotOffset(); - var o = plot.getOptions(); - - ctx.save(); - 
ctx.translate(plotOffset.left, plotOffset.top); - - var c = $.color.parse(o.selection.color); - - ctx.strokeStyle = c.scale('a', 0.8).toString(); - ctx.lineWidth = 1; - ctx.lineJoin = o.selection.shape; - ctx.fillStyle = c.scale('a', 0.4).toString(); - - var x = Math.min(selection.first.x, selection.second.x) + 0.5, - y = Math.min(selection.first.y, selection.second.y) + 0.5, - w = Math.abs(selection.second.x - selection.first.x) - 1, - h = Math.abs(selection.second.y - selection.first.y) - 1; - - ctx.fillRect(x, y, w, h); - ctx.strokeRect(x, y, w, h); - - ctx.restore(); - } - }); - - plot.hooks.shutdown.push(function (plot, eventHolder) { - eventHolder.unbind("mousemove", onMouseMove); - eventHolder.unbind("mousedown", onMouseDown); - - if (mouseUpHandler) - $(document).unbind("mouseup", mouseUpHandler); - }); - - } - - $.plot.plugins.push({ - init: init, - options: { - selection: { - mode: null, // one of null, "x", "y" or "xy" - color: "#e8cfac", - shape: "round", // one of "round", "miter", or "bevel" - minSize: 5 // minimum number of pixels - } - }, - name: 'selection', - version: '1.1' - }); -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.stack.js b/src/legacy/ui/public/flot-charts/jquery.flot.stack.js deleted file mode 100644 index 0d91c0f3c0160..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.stack.js +++ /dev/null @@ -1,188 +0,0 @@ -/* Flot plugin for stacking data sets rather than overlaying them. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -The plugin assumes the data is sorted on x (or y if stacking horizontally). -For line charts, it is assumed that if a line has an undefined gap (from a -null point), then the line above it should have the same gap - insert zeros -instead of "null" if you want another behaviour. This also holds for the start -and end of the chart. Note that stacking a mix of positive and negative values -in most instances doesn't make sense (so it looks weird). - -Two or more series are stacked when their "stack" attribute is set to the same -key (which can be any number or string or just "true"). To specify the default -stack, you can set the stack option like this: - - series: { - stack: null/false, true, or a key (number/string) - } - -You can also specify it for a single series, like this: - - $.plot( $("#placeholder"), [{ - data: [ ... ], - stack: true - }]) - -The stacking order is determined by the order of the data series in the array -(later series end up on top of the previous). - -Internally, the plugin modifies the datapoints in each series, adding an -offset to the y value. For line series, extra data points are inserted through -interpolation. If there's a second y value, it's also adjusted (e.g for bar -charts or filled areas). 
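As a quick illustration of the stacking rules described above, a minimal sketch with a hypothetical placeholder element and invented data values:

```js
// Two series share the same stack key and are drawn on top of each other;
// the third series is left unstacked. Data is already sorted on x.
$.plot($('#placeholder'), [
  { label: 'visitors', data: [[0, 1], [1, 3], [2, 2]], stack: 'traffic' },
  { label: 'robots',   data: [[0, 2], [1, 1], [2, 4]], stack: 'traffic' },
  { label: 'errors',   data: [[0, 1], [1, 0], [2, 1]] } // not stacked
], {
  series: { lines: { show: true, fill: true } }
});
```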
- -*/ - -(function ($) { - var options = { - series: { stack: null } // or number/string - }; - - function init(plot) { - function findMatchingSeries(s, allseries) { - var res = null; - for (var i = 0; i < allseries.length; ++i) { - if (s == allseries[i]) - break; - - if (allseries[i].stack == s.stack) - res = allseries[i]; - } - - return res; - } - - function stackData(plot, s, datapoints) { - if (s.stack == null || s.stack === false) - return; - - var other = findMatchingSeries(s, plot.getData()); - if (!other) - return; - - var ps = datapoints.pointsize, - points = datapoints.points, - otherps = other.datapoints.pointsize, - otherpoints = other.datapoints.points, - newpoints = [], - px, py, intery, qx, qy, bottom, - withlines = s.lines.show, - horizontal = s.bars.horizontal, - withbottom = ps > 2 && (horizontal ? datapoints.format[2].x : datapoints.format[2].y), - withsteps = withlines && s.lines.steps, - fromgap = true, - keyOffset = horizontal ? 1 : 0, - accumulateOffset = horizontal ? 0 : 1, - i = 0, j = 0, l, m; - - while (true) { - if (i >= points.length) - break; - - l = newpoints.length; - - if (points[i] == null) { - // copy gaps - for (m = 0; m < ps; ++m) - newpoints.push(points[i + m]); - i += ps; - } - else if (j >= otherpoints.length) { - // for lines, we can't use the rest of the points - if (!withlines) { - for (m = 0; m < ps; ++m) - newpoints.push(points[i + m]); - } - i += ps; - } - else if (otherpoints[j] == null) { - // oops, got a gap - for (m = 0; m < ps; ++m) - newpoints.push(null); - fromgap = true; - j += otherps; - } - else { - // cases where we actually got two points - px = points[i + keyOffset]; - py = points[i + accumulateOffset]; - qx = otherpoints[j + keyOffset]; - qy = otherpoints[j + accumulateOffset]; - bottom = 0; - - if (px == qx) { - for (m = 0; m < ps; ++m) - newpoints.push(points[i + m]); - - newpoints[l + accumulateOffset] += qy; - bottom = qy; - - i += ps; - j += otherps; - } - else if (px > qx) { - // we got past point below, might need to - // insert interpolated extra point - if (withlines && i > 0 && points[i - ps] != null) { - intery = py + (points[i - ps + accumulateOffset] - py) * (qx - px) / (points[i - ps + keyOffset] - px); - newpoints.push(qx); - newpoints.push(intery + qy); - for (m = 2; m < ps; ++m) - newpoints.push(points[i + m]); - bottom = qy; - } - - j += otherps; - } - else { // px < qx - if (fromgap && withlines) { - // if we come from a gap, we just skip this point - i += ps; - continue; - } - - for (m = 0; m < ps; ++m) - newpoints.push(points[i + m]); - - // we might be able to interpolate a point below, - // this can give us a better y - if (withlines && j > 0 && otherpoints[j - otherps] != null) - bottom = qy + (otherpoints[j - otherps + accumulateOffset] - qy) * (px - qx) / (otherpoints[j - otherps + keyOffset] - qx); - - newpoints[l + accumulateOffset] += bottom; - - i += ps; - } - - fromgap = false; - - if (l != newpoints.length && withbottom) - newpoints[l + 2] += bottom; - } - - // maintain the line steps invariant - if (withsteps && l != newpoints.length && l > 0 - && newpoints[l] != null - && newpoints[l] != newpoints[l - ps] - && newpoints[l + 1] != newpoints[l - ps + 1]) { - for (m = 0; m < ps; ++m) - newpoints[l + ps + m] = newpoints[l + m]; - newpoints[l + 1] = newpoints[l - ps + 1]; - } - } - - datapoints.points = newpoints; - } - - plot.hooks.processDatapoints.push(stackData); - } - - $.plot.plugins.push({ - init: init, - options: options, - name: 'stack', - version: '1.2' - }); -})(jQuery); diff --git 
a/src/legacy/ui/public/flot-charts/jquery.flot.symbol.js b/src/legacy/ui/public/flot-charts/jquery.flot.symbol.js deleted file mode 100644 index 79f634971b6fa..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.symbol.js +++ /dev/null @@ -1,71 +0,0 @@ -/* Flot plugin that adds some extra symbols for plotting points. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -The symbols are accessed as strings through the standard symbol options: - - series: { - points: { - symbol: "square" // or "diamond", "triangle", "cross" - } - } - -*/ - -(function ($) { - function processRawData(plot, series, datapoints) { - // we normalize the area of each symbol so it is approximately the - // same as a circle of the given radius - - var handlers = { - square: function (ctx, x, y, radius, shadow) { - // pi * r^2 = (2s)^2 => s = r * sqrt(pi)/2 - var size = radius * Math.sqrt(Math.PI) / 2; - ctx.rect(x - size, y - size, size + size, size + size); - }, - diamond: function (ctx, x, y, radius, shadow) { - // pi * r^2 = 2s^2 => s = r * sqrt(pi/2) - var size = radius * Math.sqrt(Math.PI / 2); - ctx.moveTo(x - size, y); - ctx.lineTo(x, y - size); - ctx.lineTo(x + size, y); - ctx.lineTo(x, y + size); - ctx.lineTo(x - size, y); - }, - triangle: function (ctx, x, y, radius, shadow) { - // pi * r^2 = 1/2 * s^2 * sin (pi / 3) => s = r * sqrt(2 * pi / sin(pi / 3)) - var size = radius * Math.sqrt(2 * Math.PI / Math.sin(Math.PI / 3)); - var height = size * Math.sin(Math.PI / 3); - ctx.moveTo(x - size/2, y + height/2); - ctx.lineTo(x + size/2, y + height/2); - if (!shadow) { - ctx.lineTo(x, y - height/2); - ctx.lineTo(x - size/2, y + height/2); - } - }, - cross: function (ctx, x, y, radius, shadow) { - // pi * r^2 = (2s)^2 => s = r * sqrt(pi)/2 - var size = radius * Math.sqrt(Math.PI) / 2; - ctx.moveTo(x - size, y - size); - ctx.lineTo(x + size, y + size); - ctx.moveTo(x - size, y + size); - ctx.lineTo(x + size, y - size); - } - }; - - var s = series.points.symbol; - if (handlers[s]) - series.points.symbol = handlers[s]; - } - - function init(plot) { - plot.hooks.processDatapoints.push(processRawData); - } - - $.plot.plugins.push({ - init: init, - name: 'symbols', - version: '1.0' - }); -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.threshold.js b/src/legacy/ui/public/flot-charts/jquery.flot.threshold.js deleted file mode 100644 index 8c99c401d87e5..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.threshold.js +++ /dev/null @@ -1,142 +0,0 @@ -/* Flot plugin for thresholding data. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -The plugin supports these options: - - series: { - threshold: { - below: number - color: colorspec - } - } - -It can also be applied to a single series, like this: - - $.plot( $("#placeholder"), [{ - data: [ ... ], - threshold: { ... } - }]) - -An array can be passed for multiple thresholding, like this: - - threshold: [{ - below: number1 - color: color1 - },{ - below: number2 - color: color2 - }] - -These multiple threshold objects can be passed in any order since they are -sorted by the processing function. - -The data points below "below" are drawn with the specified color. This makes -it easy to mark points below 0, e.g. for budget data. - -Internally, the plugin works by splitting the data into two series, above and -below the threshold. The extra series below the threshold will have its label -cleared and the special "originSeries" attribute set to the original series. 
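A short sketch of the threshold options described above, including the originSeries check that the next sentence refers to; the element id, data, and colours are illustrative assumptions:

```js
// Values below 0 are drawn in one colour, values below -10 in another;
// the threshold objects may be given in any order.
$.plot($('#placeholder'), [{
  data: series,          // hypothetical [[x, y], ...] pairs
  lines: { show: true },
  threshold: [
    { below: 0,   color: 'rgb(200, 20, 30)' },
    { below: -10, color: 'rgb(120, 0, 0)' }
  ]
}], { grid: { hoverable: true } });

// The extra below-threshold series generated by the plugin carries the
// "originSeries" attribute pointing back at the original series.
$('#placeholder').bind('plothover', function (event, pos, item) {
  if (item && item.series.originSeries) {
    // hovering a point that belongs to a thresholded copy of a series
  }
});
```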
-You may need to check for this in hover events. - -*/ - -(function ($) { - var options = { - series: { threshold: null } // or { below: number, color: color spec} - }; - - function init(plot) { - function thresholdData(plot, s, datapoints, below, color) { - var ps = datapoints.pointsize, i, x, y, p, prevp, - thresholded = $.extend({}, s); // note: shallow copy - - thresholded.datapoints = { points: [], pointsize: ps, format: datapoints.format }; - thresholded.label = null; - thresholded.color = color; - thresholded.threshold = null; - thresholded.originSeries = s; - thresholded.data = []; - - var origpoints = datapoints.points, - addCrossingPoints = s.lines.show; - - var threspoints = []; - var newpoints = []; - var m; - - for (i = 0; i < origpoints.length; i += ps) { - x = origpoints[i]; - y = origpoints[i + 1]; - - prevp = p; - if (y < below) - p = threspoints; - else - p = newpoints; - - if (addCrossingPoints && prevp != p && x != null - && i > 0 && origpoints[i - ps] != null) { - var interx = x + (below - y) * (x - origpoints[i - ps]) / (y - origpoints[i - ps + 1]); - prevp.push(interx); - prevp.push(below); - for (m = 2; m < ps; ++m) - prevp.push(origpoints[i + m]); - - p.push(null); // start new segment - p.push(null); - for (m = 2; m < ps; ++m) - p.push(origpoints[i + m]); - p.push(interx); - p.push(below); - for (m = 2; m < ps; ++m) - p.push(origpoints[i + m]); - } - - p.push(x); - p.push(y); - for (m = 2; m < ps; ++m) - p.push(origpoints[i + m]); - } - - datapoints.points = newpoints; - thresholded.datapoints.points = threspoints; - - if (thresholded.datapoints.points.length > 0) { - var origIndex = $.inArray(s, plot.getData()); - // Insert newly-generated series right after original one (to prevent it from becoming top-most) - plot.getData().splice(origIndex + 1, 0, thresholded); - } - - // FIXME: there are probably some edge cases left in bars - } - - function processThresholds(plot, s, datapoints) { - if (!s.threshold) - return; - - if (s.threshold instanceof Array) { - s.threshold.sort(function(a, b) { - return a.below - b.below; - }); - - $(s.threshold).each(function(i, th) { - thresholdData(plot, s, datapoints, th.below, th.color); - }); - } - else { - thresholdData(plot, s, datapoints, s.threshold.below, s.threshold.color); - } - } - - plot.hooks.processDatapoints.push(processThresholds); - } - - $.plot.plugins.push({ - init: init, - options: options, - name: 'threshold', - version: '1.2' - }); -})(jQuery); diff --git a/src/legacy/ui/public/flot-charts/jquery.flot.time.js b/src/legacy/ui/public/flot-charts/jquery.flot.time.js deleted file mode 100644 index 7612a03302764..0000000000000 --- a/src/legacy/ui/public/flot-charts/jquery.flot.time.js +++ /dev/null @@ -1,473 +0,0 @@ -/* Pretty handling of time axes. - -Copyright (c) 2007-2014 IOLA and Ole Laursen. -Licensed under the MIT license. - -Set axis.mode to "time" to enable. See the section "Time series data" in -API.txt for details. - -*/ - -import { i18n } from '@kbn/i18n'; - -(function($) { - - var options = { - xaxis: { - timezone: null, // "browser" for local to the client or timezone for timezone-js - timeformat: null, // format string to use - twelveHourClock: false, // 12 or 24 time in time mode - monthNames: null // list of names of months - } - }; - - // round to nearby lower multiple of base - - function floorInBase(n, base) { - return base * Math.floor(n / base); - } - - // Returns a string with the date d formatted according to fmt. - // A subset of the Open Group's strftime format is supported. 
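A minimal sketch of how the time-axis options above are typically combined; the placeholder element and data series are hypothetical:

```js
// Millisecond timestamps on the x axis, shown in the browser's local time
// zone with a custom strftime-style format.
$.plot($('#placeholder'), [series], {
  xaxis: {
    mode: 'time',
    timezone: 'browser',        // null/"utc" (the default) keeps ticks in UTC
    timeformat: '%b %d %H:%M',  // e.g. "Jan 05 14:30"
    twelveHourClock: false
  }
});
```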
- - function formatDate(d, fmt, monthNames, dayNames) { - - if (typeof d.strftime == "function") { - return d.strftime(fmt); - } - - var leftPad = function(n, pad) { - n = "" + n; - pad = "" + (pad == null ? "0" : pad); - return n.length == 1 ? pad + n : n; - }; - - var r = []; - var escape = false; - var hours = d.getHours(); - var isAM = hours < 12; - - if (monthNames == null) { - monthNames = [ - i18n.translate('common.ui.flotCharts.janLabel', { - defaultMessage: 'Jan', - }), i18n.translate('common.ui.flotCharts.febLabel', { - defaultMessage: 'Feb', - }), i18n.translate('common.ui.flotCharts.marLabel', { - defaultMessage: 'Mar', - }), i18n.translate('common.ui.flotCharts.aprLabel', { - defaultMessage: 'Apr', - }), i18n.translate('common.ui.flotCharts.mayLabel', { - defaultMessage: 'May', - }), i18n.translate('common.ui.flotCharts.junLabel', { - defaultMessage: 'Jun', - }), i18n.translate('common.ui.flotCharts.julLabel', { - defaultMessage: 'Jul', - }), i18n.translate('common.ui.flotCharts.augLabel', { - defaultMessage: 'Aug', - }), i18n.translate('common.ui.flotCharts.sepLabel', { - defaultMessage: 'Sep', - }), i18n.translate('common.ui.flotCharts.octLabel', { - defaultMessage: 'Oct', - }), i18n.translate('common.ui.flotCharts.novLabel', { - defaultMessage: 'Nov', - }), i18n.translate('common.ui.flotCharts.decLabel', { - defaultMessage: 'Dec', - })]; - } - - if (dayNames == null) { - dayNames = [i18n.translate('common.ui.flotCharts.sunLabel', { - defaultMessage: 'Sun', - }), i18n.translate('common.ui.flotCharts.monLabel', { - defaultMessage: 'Mon', - }), i18n.translate('common.ui.flotCharts.tueLabel', { - defaultMessage: 'Tue', - }), i18n.translate('common.ui.flotCharts.wedLabel', { - defaultMessage: 'Wed', - }), i18n.translate('common.ui.flotCharts.thuLabel', { - defaultMessage: 'Thu', - }), i18n.translate('common.ui.flotCharts.friLabel', { - defaultMessage: 'Fri', - }), i18n.translate('common.ui.flotCharts.satLabel', { - defaultMessage: 'Sat', - })]; - } - - var hours12; - - if (hours > 12) { - hours12 = hours - 12; - } else if (hours == 0) { - hours12 = 12; - } else { - hours12 = hours; - } - - for (var i = 0; i < fmt.length; ++i) { - - var c = fmt.charAt(i); - - if (escape) { - switch (c) { - case 'a': c = "" + dayNames[d.getDay()]; break; - case 'b': c = "" + monthNames[d.getMonth()]; break; - case 'd': c = leftPad(d.getDate()); break; - case 'e': c = leftPad(d.getDate(), " "); break; - case 'h': // For back-compat with 0.7; remove in 1.0 - case 'H': c = leftPad(hours); break; - case 'I': c = leftPad(hours12); break; - case 'l': c = leftPad(hours12, " "); break; - case 'm': c = leftPad(d.getMonth() + 1); break; - case 'M': c = leftPad(d.getMinutes()); break; - // quarters not in Open Group's strftime specification - case 'q': - c = "" + (Math.floor(d.getMonth() / 3) + 1); break; - case 'S': c = leftPad(d.getSeconds()); break; - case 'y': c = leftPad(d.getFullYear() % 100); break; - case 'Y': c = "" + d.getFullYear(); break; - case 'p': c = (isAM) ? ("" + "am") : ("" + "pm"); break; - case 'P': c = (isAM) ? ("" + "AM") : ("" + "PM"); break; - case 'w': c = "" + d.getDay(); break; - } - r.push(c); - escape = false; - } else { - if (c == "%") { - escape = true; - } else { - r.push(c); - } - } - } - - return r.join(""); - } - - // To have a consistent view of time-based data independent of which time - // zone the client happens to be in we need a date-like object independent - // of time zones. 
This is done through a wrapper that only calls the UTC - // versions of the accessor methods. - - function makeUtcWrapper(d) { - - function addProxyMethod(sourceObj, sourceMethod, targetObj, targetMethod) { - sourceObj[sourceMethod] = function() { - return targetObj[targetMethod].apply(targetObj, arguments); - }; - }; - - var utc = { - date: d - }; - - // support strftime, if found - - if (d.strftime != undefined) { - addProxyMethod(utc, "strftime", d, "strftime"); - } - - addProxyMethod(utc, "getTime", d, "getTime"); - addProxyMethod(utc, "setTime", d, "setTime"); - - var props = ["Date", "Day", "FullYear", "Hours", "Milliseconds", "Minutes", "Month", "Seconds"]; - - for (var p = 0; p < props.length; p++) { - addProxyMethod(utc, "get" + props[p], d, "getUTC" + props[p]); - addProxyMethod(utc, "set" + props[p], d, "setUTC" + props[p]); - } - - return utc; - }; - - // select time zone strategy. This returns a date-like object tied to the - // desired timezone - - function dateGenerator(ts, opts) { - if (opts.timezone == "browser") { - return new Date(ts); - } else if (!opts.timezone || opts.timezone == "utc") { - return makeUtcWrapper(new Date(ts)); - } else if (typeof timezoneJS != "undefined" && typeof timezoneJS.Date != "undefined") { - var d = new timezoneJS.Date(); - // timezone-js is fickle, so be sure to set the time zone before - // setting the time. - d.setTimezone(opts.timezone); - d.setTime(ts); - return d; - } else { - return makeUtcWrapper(new Date(ts)); - } - } - - // map of app. size of time units in milliseconds - - var timeUnitSize = { - "second": 1000, - "minute": 60 * 1000, - "hour": 60 * 60 * 1000, - "day": 24 * 60 * 60 * 1000, - "month": 30 * 24 * 60 * 60 * 1000, - "quarter": 3 * 30 * 24 * 60 * 60 * 1000, - "year": 365.2425 * 24 * 60 * 60 * 1000 - }; - - // the allowed tick sizes, after 1 year we use - // an integer algorithm - - var baseSpec = [ - [1, "second"], [2, "second"], [5, "second"], [10, "second"], - [30, "second"], - [1, "minute"], [2, "minute"], [5, "minute"], [10, "minute"], - [30, "minute"], - [1, "hour"], [2, "hour"], [4, "hour"], - [8, "hour"], [12, "hour"], - [1, "day"], [2, "day"], [3, "day"], - [0.25, "month"], [0.5, "month"], [1, "month"], - [2, "month"] - ]; - - // we don't know which variant(s) we'll need yet, but generating both is - // cheap - - var specMonths = baseSpec.concat([[3, "month"], [6, "month"], - [1, "year"]]); - var specQuarters = baseSpec.concat([[1, "quarter"], [2, "quarter"], - [1, "year"]]); - - function init(plot) { - plot.hooks.processOptions.push(function (plot, options) { - $.each(plot.getAxes(), function(axisName, axis) { - - var opts = axis.options; - - if (opts.mode == "time") { - axis.tickGenerator = function(axis) { - - var ticks = []; - var d = dateGenerator(axis.min, opts); - var minSize = 0; - - // make quarter use a possibility if quarters are - // mentioned in either of these options - - var spec = (opts.tickSize && opts.tickSize[1] === - "quarter") || - (opts.minTickSize && opts.minTickSize[1] === - "quarter") ? 
specQuarters : specMonths; - - if (opts.minTickSize != null) { - if (typeof opts.tickSize == "number") { - minSize = opts.tickSize; - } else { - minSize = opts.minTickSize[0] * timeUnitSize[opts.minTickSize[1]]; - } - } - - for (var i = 0; i < spec.length - 1; ++i) { - if (axis.delta < (spec[i][0] * timeUnitSize[spec[i][1]] - + spec[i + 1][0] * timeUnitSize[spec[i + 1][1]]) / 2 - && spec[i][0] * timeUnitSize[spec[i][1]] >= minSize) { - break; - } - } - - var size = spec[i][0]; - var unit = spec[i][1]; - - // special-case the possibility of several years - - if (unit == "year") { - - // if given a minTickSize in years, just use it, - // ensuring that it's an integer - - if (opts.minTickSize != null && opts.minTickSize[1] == "year") { - size = Math.floor(opts.minTickSize[0]); - } else { - - var magn = Math.pow(10, Math.floor(Math.log(axis.delta / timeUnitSize.year) / Math.LN10)); - var norm = (axis.delta / timeUnitSize.year) / magn; - - if (norm < 1.5) { - size = 1; - } else if (norm < 3) { - size = 2; - } else if (norm < 7.5) { - size = 5; - } else { - size = 10; - } - - size *= magn; - } - - // minimum size for years is 1 - - if (size < 1) { - size = 1; - } - } - - axis.tickSize = opts.tickSize || [size, unit]; - var tickSize = axis.tickSize[0]; - unit = axis.tickSize[1]; - - var step = tickSize * timeUnitSize[unit]; - - if (unit == "second") { - d.setSeconds(floorInBase(d.getSeconds(), tickSize)); - } else if (unit == "minute") { - d.setMinutes(floorInBase(d.getMinutes(), tickSize)); - } else if (unit == "hour") { - d.setHours(floorInBase(d.getHours(), tickSize)); - } else if (unit == "month") { - d.setMonth(floorInBase(d.getMonth(), tickSize)); - } else if (unit == "quarter") { - d.setMonth(3 * floorInBase(d.getMonth() / 3, - tickSize)); - } else if (unit == "year") { - d.setFullYear(floorInBase(d.getFullYear(), tickSize)); - } - - // reset smaller components - - d.setMilliseconds(0); - - if (step >= timeUnitSize.minute) { - d.setSeconds(0); - } - if (step >= timeUnitSize.hour) { - d.setMinutes(0); - } - if (step >= timeUnitSize.day) { - d.setHours(0); - } - if (step >= timeUnitSize.day * 4) { - d.setDate(1); - } - if (step >= timeUnitSize.month * 2) { - d.setMonth(floorInBase(d.getMonth(), 3)); - } - if (step >= timeUnitSize.quarter * 2) { - d.setMonth(floorInBase(d.getMonth(), 6)); - } - if (step >= timeUnitSize.year) { - d.setMonth(0); - } - - var carry = 0; - var v = Number.NaN; - var prev; - - do { - - prev = v; - v = d.getTime(); - ticks.push(v); - - if (unit == "month" || unit == "quarter") { - if (tickSize < 1) { - - // a bit complicated - we'll divide the - // month/quarter up but we need to take - // care of fractions so we don't end up in - // the middle of a day - - d.setDate(1); - var start = d.getTime(); - d.setMonth(d.getMonth() + - (unit == "quarter" ? 3 : 1)); - var end = d.getTime(); - d.setTime(v + carry * timeUnitSize.hour + (end - start) * tickSize); - carry = d.getHours(); - d.setHours(0); - } else { - d.setMonth(d.getMonth() + - tickSize * (unit == "quarter" ? 
3 : 1)); - } - } else if (unit == "year") { - d.setFullYear(d.getFullYear() + tickSize); - } else { - d.setTime(v + step); - } - } while (v < axis.max && v != prev); - - return ticks; - }; - - axis.tickFormatter = function (v, axis) { - - var d = dateGenerator(v, axis.options); - - // first check global format - - if (opts.timeformat != null) { - return formatDate(d, opts.timeformat, opts.monthNames, opts.dayNames); - } - - // possibly use quarters if quarters are mentioned in - // any of these places - - var useQuarters = (axis.options.tickSize && - axis.options.tickSize[1] == "quarter") || - (axis.options.minTickSize && - axis.options.minTickSize[1] == "quarter"); - - var t = axis.tickSize[0] * timeUnitSize[axis.tickSize[1]]; - var span = axis.max - axis.min; - var suffix = (opts.twelveHourClock) ? " %p" : ""; - var hourCode = (opts.twelveHourClock) ? "%I" : "%H"; - var fmt; - - if (t < timeUnitSize.minute) { - fmt = hourCode + ":%M:%S" + suffix; - } else if (t < timeUnitSize.day) { - if (span < 2 * timeUnitSize.day) { - fmt = hourCode + ":%M" + suffix; - } else { - fmt = "%b %d " + hourCode + ":%M" + suffix; - } - } else if (t < timeUnitSize.month) { - fmt = "%b %d"; - } else if ((useQuarters && t < timeUnitSize.quarter) || - (!useQuarters && t < timeUnitSize.year)) { - if (span < timeUnitSize.year) { - fmt = "%b"; - } else { - fmt = "%b %Y"; - } - } else if (useQuarters && t < timeUnitSize.year) { - if (span < timeUnitSize.year) { - fmt = "Q%q"; - } else { - fmt = "Q%q %Y"; - } - } else { - fmt = "%Y"; - } - - var rt = formatDate(d, fmt, opts.monthNames, opts.dayNames); - - return rt; - }; - } - }); - }); - } - - $.plot.plugins.push({ - init: init, - options: options, - name: 'time', - version: '1.0' - }); - - // Time-axis support used to be in Flot core, which exposed the - // formatDate function on the plot object. Various plugins depend - // on the function, so we need to re-expose it here. - - $.plot.formatDate = formatDate; - $.plot.dateGenerator = dateGenerator; - -})(jQuery); diff --git a/src/legacy/ui/public/i18n/__snapshots__/index.test.tsx.snap b/src/legacy/ui/public/i18n/__snapshots__/index.test.tsx.snap deleted file mode 100644 index fd6a0a07ba39c..0000000000000 --- a/src/legacy/ui/public/i18n/__snapshots__/index.test.tsx.snap +++ /dev/null @@ -1,10 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`ui/i18n renders children and forwards properties 1`] = ` -
-<div> -   Context: -  <div> -    Child: some prop:100500 -  </div> -</div>

-`; diff --git a/src/legacy/ui/public/i18n/index.test.tsx b/src/legacy/ui/public/i18n/index.test.tsx deleted file mode 100644 index be8ab4cf8d696..0000000000000 --- a/src/legacy/ui/public/i18n/index.test.tsx +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { render } from 'enzyme'; -import PropTypes from 'prop-types'; -import React from 'react'; - -jest.mock('angular-sanitize', () => {}); -jest.mock('ui/new_platform', () => ({ - npStart: { - core: { - i18n: { Context: ({ children }: any) =>
<div> Context: {children}</div>
}, - }, - }, -})); - -import { wrapInI18nContext } from '.'; - -describe('ui/i18n', () => { - test('renders children and forwards properties', () => { - const mockPropTypes = { - stringProp: PropTypes.string.isRequired, - numberProp: PropTypes.number, - }; - - const WrappedComponent = wrapInI18nContext( - class extends React.PureComponent<{ [P in keyof typeof mockPropTypes]: unknown }> { - public static propTypes = mockPropTypes; - - public render() { - return ( - - Child: {this.props.stringProp}:{this.props.numberProp} - - ); - } - } - ); - - expect(WrappedComponent.propTypes).toBe(mockPropTypes); - expect( - render() - ).toMatchSnapshot(); - }); -}); diff --git a/src/legacy/ui/public/i18n/index.tsx b/src/legacy/ui/public/i18n/index.tsx deleted file mode 100644 index 290e82a1334b9..0000000000000 --- a/src/legacy/ui/public/i18n/index.tsx +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import React from 'react'; -// required for `ngSanitize` angular module -import 'angular-sanitize'; - -import { i18nDirective, i18nFilter, I18nProvider } from '@kbn/i18n/angular'; -// @ts-ignore -import { uiModules } from 'ui/modules'; -import { npStart } from 'ui/new_platform'; - -export const I18nContext = npStart.core.i18n.Context; - -export function wrapInI18nContext
<P>
(ComponentToWrap: React.ComponentType
<P>
) { - const ContextWrapper: React.FC
<P>
= (props) => { - return ( - - - - ); - }; - - // Original propTypes from the wrapped component should be re-exposed - // since it will be used by reactDirective Angular service - // that will rely on propTypes to watch attributes with these names - ContextWrapper.propTypes = ComponentToWrap.propTypes; - - return ContextWrapper; -} - -uiModules - .get('i18n', ['ngSanitize']) - .provider('i18n', I18nProvider) - .filter('i18n', i18nFilter) - .directive('i18nId', i18nDirective); diff --git a/src/legacy/ui/public/indexed_array/__tests__/indexed_array.js b/src/legacy/ui/public/indexed_array/__tests__/indexed_array.js deleted file mode 100644 index df96a58a6e99f..0000000000000 --- a/src/legacy/ui/public/indexed_array/__tests__/indexed_array.js +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; -import expect from '@kbn/expect'; -import { IndexedArray } from '..'; - -// this is generally a data-structure that IndexedArray is good for managing -const users = [ - { name: 'John', id: 6, username: 'beast', group: 'admins' }, - { name: 'Anon', id: 0, username: 'shhhh', group: 'secret' }, - { name: 'Fern', id: 42, username: 'kitty', group: 'editor' }, - { name: 'Mary', id: 55, username: 'sheep', group: 'editor' }, -]; - -// this is how we used to accomplish this, before IndexedArray -users.byName = _.keyBy(users, 'name'); -users.byUsername = _.keyBy(users, 'username'); -users.byGroup = _.groupBy(users, 'group'); -users.inIdOrder = _.sortBy(users, 'id'); - -// then things started becoming unruly... so IndexedArray! 
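For readers unfamiliar with IndexedArray, a hedged usage sketch built on the users fixture above; the `ui/indexed_array` import path is an assumption based on the legacy `ui/` alias, and the rest mirrors what the tests below exercise:

```js
import { IndexedArray } from 'ui/indexed_array';

// Index, group, and order the same users fixture in one structure.
const registry = new IndexedArray({
  index: ['username'], // exposes registry.byUsername (object keyed by username)
  group: ['group'],    // exposes registry.byGroup (an IndexedArray per group)
  order: ['id'],       // exposes registry.inIdOrder (copy sorted by id)
  initialSet: users,
});

registry.byUsername.kitty; // the "Fern" user object
registry.byGroup.editor;   // the two editor users
registry.inIdOrder[0];     // "Anon", whose id is 0

// Mutating methods keep the raw array in sync and lazily rebuild the indices.
registry.push({ name: 'Zoe', id: 99, username: 'zzz', group: 'admins' });
```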
- -describe('IndexedArray', function () { - describe('Basics', function () { - let reg; - - beforeEach(function () { - reg = new IndexedArray(); - }); - - it('Extends Array', function () { - expect(reg).to.be.a(Array); - }); - - it('fails basic lodash check', function () { - expect(Array.isArray(reg)).to.be(false); - }); - - it('clones to an object', function () { - expect(_.isObject(_.clone(reg))).to.be(true); - expect(Array.isArray(_.clone(reg))).to.be(false); - }); - }); - - describe('Indexing', function () { - it('provides the initial set', function () { - const reg = new IndexedArray({ - initialSet: [1, 2, 3], - }); - - expect(reg).to.have.length(3); - - reg.forEach(function (v, i) { - expect(v).to.eql(i + 1); - }); - }); - - it('indexes the initial set', function () { - const reg = new IndexedArray({ - index: ['username'], - initialSet: users, - }); - - expect(reg).to.have.property('byUsername'); - expect(reg.byUsername).to.eql(users.byUsername); - }); - - it('updates indices after values are added', function () { - // split up the user list, and add it in chunks - const firstUser = users.slice(0, 1).pop(); - const otherUsers = users.slice(1); - - // start off with all but the first - const reg = new IndexedArray({ - group: ['group'], - order: ['id'], - initialSet: otherUsers, - }); - - // add the first - reg.push(firstUser); - - // end up with the same structure that is in the users fixture - expect(Object.keys(reg.byGroup).length).to.be(Object.keys(users.byGroup).length); - for (const group of Object.keys(reg.byGroup)) { - expect(reg.byGroup[group].toJSON()).to.eql(users.byGroup[group]); - } - - expect(reg.inIdOrder).to.eql(users.inIdOrder); - }); - - it('updates indices after values are removed', function () { - // start off with all - const reg = new IndexedArray({ - group: ['group'], - order: ['id'], - initialSet: users, - }); - - // remove the last - reg.pop(); - - const expectedCount = users.length - 1; - // indexed lists should be updated - expect(reg).to.have.length(expectedCount); - - const sumOfGroups = _.reduce( - reg.byGroup, - function (note, group) { - return note + group.length; - }, - 0 - ); - expect(sumOfGroups).to.eql(expectedCount); - }); - - it('removes items based on a predicate', function () { - const reg = new IndexedArray({ - group: ['group'], - order: ['id'], - initialSet: users, - }); - - reg.remove({ name: 'John' }); - - expect(_.isEqual(reg.raw, reg.slice(0))).to.be(true); - expect(reg.length).to.be(3); - expect(reg[0].name).to.be('Anon'); - }); - - it('updates indices after values are re-ordered', function () { - const rawUsers = users.slice(0); - - // collect and shuffle the ids available - let ids = []; - _.times(rawUsers.length, function (i) { - ids.push(i); - }); - ids = _.shuffle(ids); - - // move something here - const toI = ids.shift(); - // from here - const fromI = ids.shift(); - // do the move - const move = function (arr) { - arr.splice(toI, 0, arr.splice(fromI, 1)[0]); - }; - - const reg = new IndexedArray({ - index: ['username'], - initialSet: rawUsers, - }); - - const index = reg.byUsername; - - move(reg); - - expect(reg.byUsername).to.eql(index); - expect(reg.byUsername).to.not.be(index); - }); - }); - - describe('Ordering', function () { - it('ordering is case insensitive', function () { - const reg = new IndexedArray({ - index: ['title'], - order: ['title'], - initialSet: [{ title: 'APM' }, { title: 'Advanced Settings' }], - }); - - const ordered = reg.inTitleOrder; - expect(ordered[0].title).to.be('Advanced Settings'); - 
expect(ordered[1].title).to.be('APM'); - }); - - it('ordering handles numbers', function () { - const reg = new IndexedArray({ - index: ['id'], - order: ['id'], - initialSet: users, - }); - - const ordered = reg.inIdOrder; - expect(ordered[0].id).to.be(0); - expect(ordered[1].id).to.be(6); - expect(ordered[2].id).to.be(42); - expect(ordered[3].id).to.be(55); - }); - }); -}); diff --git a/src/legacy/ui/public/indexed_array/__tests__/inflector.js b/src/legacy/ui/public/indexed_array/__tests__/inflector.js deleted file mode 100644 index 49ac79094e501..0000000000000 --- a/src/legacy/ui/public/indexed_array/__tests__/inflector.js +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { inflector } from '../inflector'; -import expect from '@kbn/expect'; - -describe('IndexedArray Inflector', function () { - it('returns a function', function () { - const getter = inflector(); - expect(getter).to.be.a('function'); - }); - - describe('fn', function () { - it('prepends a prefix', function () { - const inflect = inflector('my'); - - expect(inflect('Family')).to.be('myFamily'); - expect(inflect('family')).to.be('myFamily'); - expect(inflect('fAmIlY')).to.be('myFAmIlY'); - }); - - it('adds both a prefix and suffix', function () { - const inflect = inflector('foo', 'Bar'); - - expect(inflect('box')).to.be('fooBoxBar'); - expect(inflect('box.car.MAX')).to.be('fooBoxCarMaxBar'); - expect(inflect('BaZzY')).to.be('fooBaZzYBar'); - }); - - it('ignores prefix if it is already at the end of the inflected string', function () { - const inflect = inflector('foo', 'Bar'); - expect(inflect('fooBox')).to.be('fooBoxBar'); - expect(inflect('FooBox')).to.be('FooBoxBar'); - }); - - it('ignores postfix if it is already at the end of the inflected string', function () { - const inflect = inflector('foo', 'Bar'); - expect(inflect('bar')).to.be('fooBar'); - expect(inflect('showBoxBar')).to.be('fooShowBoxBar'); - }); - - it('works with "name"', function () { - const inflect = inflector('in', 'Order'); - expect(inflect('name')).to.be('inNameOrder'); - }); - }); -}); diff --git a/src/legacy/ui/public/indexed_array/helpers/organize_by.test.ts b/src/legacy/ui/public/indexed_array/helpers/organize_by.test.ts deleted file mode 100644 index fc4ca8469382a..0000000000000 --- a/src/legacy/ui/public/indexed_array/helpers/organize_by.test.ts +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { groupBy } from 'lodash'; -import { organizeBy } from './organize_by'; - -describe('organizeBy', () => { - test('it works', () => { - const col = [ - { - name: 'one', - roles: ['user', 'admin', 'owner'], - }, - { - name: 'two', - roles: ['user'], - }, - { - name: 'three', - roles: ['user'], - }, - { - name: 'four', - roles: ['user', 'admin'], - }, - ]; - - const resp = organizeBy(col, 'roles'); - expect(resp).toHaveProperty('user'); - expect(resp.user.length).toBe(4); - - expect(resp).toHaveProperty('admin'); - expect(resp.admin.length).toBe(2); - - expect(resp).toHaveProperty('owner'); - expect(resp.owner.length).toBe(1); - }); - - test('behaves just like groupBy in normal scenarios', () => { - const col = [{ name: 'one' }, { name: 'two' }, { name: 'three' }, { name: 'four' }]; - - const orgs = organizeBy(col, 'name'); - const groups = groupBy(col, 'name'); - - expect(orgs).toEqual(groups); - }); -}); diff --git a/src/legacy/ui/public/indexed_array/helpers/organize_by.ts b/src/legacy/ui/public/indexed_array/helpers/organize_by.ts deleted file mode 100644 index e923767c892cd..0000000000000 --- a/src/legacy/ui/public/indexed_array/helpers/organize_by.ts +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { each, isFunction } from 'lodash'; - -/** - * Like _.groupBy, but allows specifying multiple groups for a - * single object. - * - * organizeBy([{ a: [1, 2, 3] }, { b: true, a: [1, 4] }], 'a') - * // Object {1: Array[2], 2: Array[1], 3: Array[1], 4: Array[1]} - * - * _.groupBy([{ a: [1, 2, 3] }, { b: true, a: [1, 4] }], 'a') - * // Object {'1,2,3': Array[1], '1,4': Array[1]} - * - * @param {array} collection - the list of values to organize - * @param {Function} callback - either a property name, or a callback. - * @return {object} - */ -export function organizeBy(collection: object[], callback: ((obj: object) => string) | string) { - const buckets: { [key: string]: object[] } = {}; - - function add(key: string, obj: object) { - if (!buckets[key]) { - buckets[key] = []; - } - buckets[key].push(obj); - } - - each(collection, (obj: Record) => { - const keys = isFunction(callback) ? 
callback(obj) : obj[callback]; - - if (!Array.isArray(keys)) { - add(keys, obj); - return; - } - - let length = keys.length; - while (length-- > 0) { - add(keys[length], obj); - } - }); - - return buckets; -} diff --git a/src/legacy/ui/public/indexed_array/index.d.ts b/src/legacy/ui/public/indexed_array/index.d.ts deleted file mode 100644 index 21c0a818731ac..0000000000000 --- a/src/legacy/ui/public/indexed_array/index.d.ts +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { ListIterator } from 'lodash'; - -interface IndexedArrayConfig { - index?: string[]; - group?: string[]; - order?: string[]; - initialSet?: T[]; - immutable?: boolean; -} - -declare class IndexedArray extends Array { - public immutable: boolean; - public raw: T[]; - - // These may not actually be present, as they are dynamically defined - public inOrder: T[]; - public byType: Record; - public byName: Record; - - constructor(config: IndexedArrayConfig); - - public remove(predicate: ListIterator): T[]; - - public toJSON(): T[]; -} diff --git a/src/legacy/ui/public/indexed_array/index.js b/src/legacy/ui/public/indexed_array/index.js deleted file mode 100644 index 6a42961c9e680..0000000000000 --- a/src/legacy/ui/public/indexed_array/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { IndexedArray } from './indexed_array'; diff --git a/src/legacy/ui/public/indexed_array/indexed_array.js b/src/legacy/ui/public/indexed_array/indexed_array.js deleted file mode 100644 index b9a427b8da7ad..0000000000000 --- a/src/legacy/ui/public/indexed_array/indexed_array.js +++ /dev/null @@ -1,235 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; -import { inflector } from './inflector'; -import { organizeBy } from './helpers/organize_by'; - -const pathGetter = _(_.get).rearg(1, 0).ary(2); -const inflectIndex = inflector('by'); -const inflectOrder = inflector('in', 'Order'); - -const CLEAR_CACHE = {}; -const OPT_NAMES = ['index', 'group', 'order', 'initialSet', 'immutable']; - -/** - * Generic extension of Array class, which will index (and reindex) the - * objects it contains based on their properties. - * - * @param {Object} config describes the properties of this registry object - * @param {Array} [config.index] a list of props/paths that should be used to index the docs. - * @param {Array} [config.group] a list of keys/paths to group docs by. - * @param {Array} [config.order] a list of keys/paths to order the keys by. - * @param {Array} [config.initialSet] the initial dataset the IndexedArray should contain. - * @param {boolean} [config.immutable] a flag that hints to people reading the implementation that this IndexedArray - * should not be modified - */ - -export class IndexedArray { - static OPT_NAMES = OPT_NAMES; - - constructor(config) { - config = _.pick(config || {}, OPT_NAMES); - - // use defineProperty so that value can't be changed - Object.defineProperty(this, 'raw', { value: [] }); - - this._indexNames = _.union( - this._setupIndex(config.group, inflectIndex, organizeByIndexedArray(config)), - this._setupIndex(config.index, inflectIndex, _.keyBy), - this._setupIndex(config.order, inflectOrder, (raw, pluckValue) => { - return [...raw].sort((itemA, itemB) => { - const a = pluckValue(itemA); - const b = pluckValue(itemB); - if (typeof a === 'number' && typeof b === 'number') { - return a - b; - } - return String(a).toLowerCase().localeCompare(String(b).toLowerCase()); - }); - }) - ); - - if (config.initialSet) { - this.push.apply(this, config.initialSet); - } - - Object.defineProperty(this, 'immutable', { value: !!config.immutable }); - } - - /** - * Remove items from this based on a predicate - * @param {Function|Object|string} predicate - the predicate used to decide what is removed - * @return {array} - the removed data - */ - remove(predicate) { - this._assertMutable('remove'); - const out = _.remove(this, predicate); - _.remove(this.raw, predicate); - this._clearIndices(); - return out; - } - - /** - * provide a hook for the JSON serializer - * @return {array} - a plain, vanilla array with our same data - */ - toJSON() { - return this.raw; - } - - // wrappers for mutable Array methods - copyWithin(...args) { - return this._mutation('copyWithin', args); - } - fill(...args) { - return this._mutation('fill', args); - } - pop(...args) { - return this._mutation('pop', args); - } - push(...args) { - return this._mutation('push', args); - } - reverse(...args) { - return this._mutation('reverse', args); - } - shift(...args) { - return this._mutation('shift', args); - } - sort(...args) { - return this._mutation('sort', 
args); - } - splice(...args) { - return this._mutation('splice', args); - } - unshift(...args) { - return this._mutation('unshift', args); - } - - /** - * If this instance of IndexedArray is not mutable, throw an error - * @private - * @param {String} methodName - user facing method name, for error message - * @return {undefined} - */ - _assertMutable(methodName) { - if (this.immutable) { - throw new Error(`${methodName}() is not allowed on immutable IndexedArray instances`); - } - } - - /** - * Execute some mutable method from the Array prototype - * on the IndexedArray and this.raw - * - * @private - * @param {string} methodName - * @param {Array} args - * @return {any} - */ - _mutation(methodName, args) { - this._assertMutable(methodName); - super[methodName].apply(this, args); - this._clearIndices(); - return super[methodName].apply(this.raw, args); - } - - /** - * Create indices for a group of object properties. getters and setters are used to - * read and control the indices. - * @private - * @param {string[]} props - the properties that should be used to index docs - * @param {function} inflect - a function that will be called with a property name, and - * creates the public property at which the index will be exposed - * @param {function} op - the function that will be used to create the indices, it is passed - * the raw representation of the registry, and a getter for reading the - * right prop - * - * @returns {string[]} - the public keys of all indices created - */ - _setupIndex(props, inflect, op) { - // shortcut for empty props - if (!props || props.length === 0) return; - - return props.map((prop) => { - const indexName = inflect(prop); - const getIndexValueFromItem = pathGetter.partial(prop).value(); - let cache; - - Object.defineProperty(this, indexName, { - enumerable: false, - configurable: false, - - set: (val) => { - // can't set any value other than the CLEAR_CACHE constant - if (val === CLEAR_CACHE) { - cache = false; - } else { - throw new TypeError(indexName + ' can not be set, it is a computed index of values'); - } - }, - get: () => { - if (!cache) { - cache = op(this.raw, getIndexValueFromItem); - } - - return cache; - }, - }); - - return indexName; - }); - } - - /** - * Clear cached index/group/order caches so they will be recreated - * on next access - * @private - * @return {undefined} - */ - _clearIndices() { - this._indexNames.forEach((name) => { - this[name] = CLEAR_CACHE; - }); - } -} - -// using traditional `extends Array` syntax doesn't work with babel -// See https://babeljs.io/docs/usage/caveats/ -Object.setPrototypeOf(IndexedArray.prototype, Array.prototype); - -// Similar to `organizeBy` but returns IndexedArrays instead of normal Arrays. -function organizeByIndexedArray(config) { - return (...args) => { - const grouped = organizeBy(...args); - - return _.reduce( - grouped, - (acc, value, group) => { - acc[group] = new IndexedArray({ - ...config, - initialSet: value, - }); - - return acc; - }, - {} - ); - }; -} diff --git a/src/legacy/ui/public/indexed_array/inflector.js b/src/legacy/ui/public/indexed_array/inflector.js deleted file mode 100644 index e034146f5f62f..0000000000000 --- a/src/legacy/ui/public/indexed_array/inflector.js +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -function upFirst(str, total) { - return str.charAt(0).toUpperCase() + (total ? str.substr(1).toLowerCase() : str.substr(1)); -} - -function startsWith(str, test) { - return str.substr(0, test.length).toLowerCase() === test.toLowerCase(); -} - -function endsWith(str, test) { - const tooShort = str.length < test.length; - if (tooShort) return; - - return str.substr(str.length - test.length).toLowerCase() === test.toLowerCase(); -} - -export function inflector(prefix, postfix) { - return function inflect(key) { - let inflected; - - if (key.indexOf('.') !== -1) { - inflected = key - .split('.') - .map(function (step, i) { - return i === 0 ? step : upFirst(step, true); - }) - .join(''); - } else { - inflected = key; - } - - if (prefix && !startsWith(key, prefix)) { - inflected = prefix + upFirst(inflected); - } - - if (postfix && !endsWith(key, postfix)) { - inflected = inflected + postfix; - } - - return inflected; - }; -} diff --git a/src/legacy/ui/public/kfetch/__mocks__/index.ts b/src/legacy/ui/public/kfetch/__mocks__/index.ts deleted file mode 100644 index 1a128e2b85260..0000000000000 --- a/src/legacy/ui/public/kfetch/__mocks__/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
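For reference, a minimal sketch of how the removed IndexedArray and inflector fit together, based on the code deleted above. The 'ui/indexed_array' import alias is an assumption; the derived property names (byName, inTitleOrder) follow directly from inflector('by') and inflector('in', 'Order'):

// Illustrative sketch of the removed legacy API (import alias assumed).
import { IndexedArray } from 'ui/indexed_array';

const registry = new IndexedArray({
  index: ['name'],   // exposes registry.byName, built with _.keyBy
  order: ['title'],  // exposes registry.inTitleOrder, a sorted copy of the items
  initialSet: [
    { name: 'b', title: 'Bravo' },
    { name: 'a', title: 'Alpha' },
  ],
});

registry.byName.a;      // { name: 'a', title: 'Alpha' }
registry.inTitleOrder;  // items ordered by title, case-insensitively: Alpha, then Bravo
registry.push({ name: 'c', title: 'Charlie' }); // mutations clear the cached indices

// Dotted paths are camel-cased by the inflector: index: ['some.path'] -> registry.bySomePath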
- */ - -export const kfetch = () => Promise.resolve(); diff --git a/src/legacy/ui/public/kfetch/_import_objects.ndjson b/src/legacy/ui/public/kfetch/_import_objects.ndjson deleted file mode 100644 index 3511fb44cdfb2..0000000000000 --- a/src/legacy/ui/public/kfetch/_import_objects.ndjson +++ /dev/null @@ -1 +0,0 @@ -{"attributes":{"description":"","kibanaSavedObjectMeta":{"searchSourceJSON":"{\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\"}"},"title":"Log Agents","uiStateJSON":"{}","visState":"{\"title\":\"Log Agents\",\"type\":\"area\",\"params\":{\"type\":\"area\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"agent.raw: Descending\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"agent.raw\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}]}"},"id":"082f1d60-a2e7-11e7-bb30-233be9be6a15","migrationVersion":{"visualization":"7.0.0"},"references":[{"id":"f1e4c910-a2e6-11e7-bb30-233be9be6a15","name":"kibanaSavedObjectMeta.searchSourceJSON.index","type":"index-pattern"}],"type":"visualization","version":1} diff --git a/src/legacy/ui/public/kfetch/index.ts b/src/legacy/ui/public/kfetch/index.ts deleted file mode 100644 index 105df171ad370..0000000000000 --- a/src/legacy/ui/public/kfetch/index.ts +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { npSetup } from 'ui/new_platform'; -import { createKfetch, KFetchKibanaOptions, KFetchOptions } from './kfetch'; -export { addInterceptor, KFetchOptions, KFetchQuery } from './kfetch'; - -const kfetchInstance = createKfetch(npSetup.core.http); - -export const kfetch = (options: KFetchOptions, kfetchOptions?: KFetchKibanaOptions) => { - return kfetchInstance(options, kfetchOptions); -}; diff --git a/src/legacy/ui/public/kfetch/kfetch.test.mocks.ts b/src/legacy/ui/public/kfetch/kfetch.test.mocks.ts deleted file mode 100644 index ea066b3623f13..0000000000000 --- a/src/legacy/ui/public/kfetch/kfetch.test.mocks.ts +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { setup } from '../../../../test_utils/public/http_test_setup'; - -jest.doMock('ui/new_platform', () => ({ - npSetup: { - core: setup(), - }, -})); diff --git a/src/legacy/ui/public/kfetch/kfetch.test.ts b/src/legacy/ui/public/kfetch/kfetch.test.ts deleted file mode 100644 index c45a142d54e9b..0000000000000 --- a/src/legacy/ui/public/kfetch/kfetch.test.ts +++ /dev/null @@ -1,487 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -// @ts-ignore -import fetchMock from 'fetch-mock/es5/client'; -import './kfetch.test.mocks'; -import { readFileSync } from 'fs'; -import { join } from 'path'; -import { addInterceptor, kfetch, KFetchOptions } from '.'; -import { Interceptor, resetInterceptors, withDefaultOptions } from './kfetch'; -import { KFetchError } from './kfetch_error'; - -describe('kfetch', () => { - afterEach(() => { - fetchMock.restore(); - resetInterceptors(); - }); - - it('should use supplied request method', async () => { - fetchMock.post('*', {}); - await kfetch({ pathname: '/my/path', method: 'POST' }); - expect(fetchMock.lastOptions()!.method).toBe('POST'); - }); - - it('should use supplied Content-Type', async () => { - fetchMock.get('*', {}); - await kfetch({ pathname: '/my/path', headers: { 'Content-Type': 'CustomContentType' } }); - expect(fetchMock.lastOptions()!.headers).toMatchObject({ - 'content-type': 'CustomContentType', - }); - }); - - it('should use supplied pathname and querystring', async () => { - fetchMock.get('*', {}); - await kfetch({ pathname: '/my/path', query: { a: 'b' } }); - expect(fetchMock.lastUrl()).toBe('http://localhost/myBase/my/path?a=b'); - }); - - it('should use supplied headers', async () => { - fetchMock.get('*', {}); - await kfetch({ - pathname: '/my/path', - headers: { myHeader: 'foo' }, - }); - - expect(fetchMock.lastOptions()!.headers).toEqual({ - 'content-type': 'application/json', - 'kbn-version': 'kibanaVersion', - myheader: 'foo', - }); - }); - - it('should return response', async () => { - fetchMock.get('*', { foo: 'bar' }); - const res = await kfetch({ pathname: '/my/path' }); - expect(res).toEqual({ foo: 'bar' }); - }); - - it('should prepend url with basepath by default', async () => { - fetchMock.get('*', {}); - await kfetch({ pathname: '/my/path' }); - expect(fetchMock.lastUrl()).toBe('http://localhost/myBase/my/path'); - }); - - it('should not prepend url with basepath when disabled', async () => { - fetchMock.get('*', {}); - await kfetch({ pathname: '/my/path' }, { prependBasePath: false }); - expect(fetchMock.lastUrl()).toBe('/my/path'); - }); - - it('should make request with defaults', async () => { - fetchMock.get('*', {}); - await kfetch({ pathname: '/my/path' }); - - expect(fetchMock.lastCall()!.request.credentials).toBe('same-origin'); - expect(fetchMock.lastOptions()!).toMatchObject({ - method: 'GET', - headers: { - 'content-type': 'application/json', - 'kbn-version': 'kibanaVersion', - }, - }); - }); - - it('should make requests for NDJSON content', async () => { - const content = readFileSync(join(__dirname, '_import_objects.ndjson'), { encoding: 'utf-8' }); - - fetchMock.post('*', { - body: content, - headers: { 'Content-Type': 'application/ndjson' }, - }); - - const data = await kfetch({ - method: 'POST', - pathname: '/my/path', - body: content, - headers: { - 'Content-Type': 'multipart/form-data', - }, - }); - - expect(data).toBeInstanceOf(Blob); - - const ndjson = await new Response(data).text(); - - expect(ndjson).toEqual(content); - }); - - it('should reject on network error', async () => { - expect.assertions(1); - fetchMock.get('*', { status: 500 }); - - try { - await kfetch({ pathname: '/my/path' }); - } catch (e) { - expect(e.message).toBe('Internal Server Error'); - } - }); - - describe('when throwing response error (KFetchError)', () => { - let error: KFetchError; - beforeEach(async () => { - fetchMock.get('*', { status: 404, body: { foo: 'bar' } }); - try { - await kfetch({ pathname: '/my/path' }); - } catch (e) { - error = e; - 
} - }); - - it('should contain error message', () => { - expect(error.message).toBe('Not Found'); - }); - - it('should return response body', () => { - expect(error.body).toEqual({ foo: 'bar' }); - }); - - it('should contain response properties', () => { - expect(error.res.status).toBe(404); - expect(error.res.url).toBe('http://localhost/myBase/my/path'); - }); - }); - - describe('when all interceptor resolves', () => { - let resp: any; - let interceptorCalls: string[]; - - beforeEach(async () => { - fetchMock.get('*', { foo: 'bar' }); - - interceptorCalls = mockInterceptorCalls([{}, {}, {}]); - resp = await kfetch({ pathname: '/my/path' }); - }); - - it('should call interceptors in correct order', () => { - expect(interceptorCalls).toEqual([ - 'Request #3', - 'Request #2', - 'Request #1', - 'Response #1', - 'Response #2', - 'Response #3', - ]); - }); - - it('should make request', () => { - expect(fetchMock.called()).toBe(true); - }); - - it('should return response', () => { - expect(resp).toEqual({ foo: 'bar' }); - }); - }); - - describe('when a request interceptor throws; and the next requestError interceptor resolves', () => { - let resp: any; - let interceptorCalls: string[]; - - beforeEach(async () => { - fetchMock.get('*', { foo: 'bar' }); - - interceptorCalls = mockInterceptorCalls([ - { requestError: () => ({ pathname: '/my/path' } as KFetchOptions) }, - { request: () => Promise.reject(new Error('Error in request')) }, - {}, - ]); - - resp = await kfetch({ pathname: '/my/path' }); - }); - - it('should call interceptors in correct order', () => { - expect(interceptorCalls).toEqual([ - 'Request #3', - 'Request #2', - 'RequestError #1', - 'Response #1', - 'Response #2', - 'Response #3', - ]); - }); - - it('should make request', () => { - expect(fetchMock.called()).toBe(true); - }); - - it('should return response', () => { - expect(resp).toEqual({ foo: 'bar' }); - }); - }); - - describe('when a request interceptor throws', () => { - let error: Error; - let interceptorCalls: string[]; - - beforeEach(async () => { - fetchMock.get('*', { foo: 'bar' }); - - interceptorCalls = mockInterceptorCalls([ - {}, - { request: () => Promise.reject(new Error('Error in request')) }, - {}, - ]); - - try { - await kfetch({ pathname: '/my/path' }); - } catch (e) { - error = e; - } - }); - - it('should call interceptors in correct order', () => { - expect(interceptorCalls).toEqual([ - 'Request #3', - 'Request #2', - 'RequestError #1', - 'ResponseError #1', - 'ResponseError #2', - 'ResponseError #3', - ]); - }); - - it('should not make request', () => { - expect(fetchMock.called()).toBe(false); - }); - - it('should throw error', () => { - expect(error.message).toEqual('Error in request'); - }); - }); - - describe('when a response interceptor throws', () => { - let error: Error; - let interceptorCalls: string[]; - - beforeEach(async () => { - fetchMock.get('*', { foo: 'bar' }); - - interceptorCalls = mockInterceptorCalls([ - { response: () => Promise.reject(new Error('Error in response')) }, - {}, - {}, - ]); - - try { - await kfetch({ pathname: '/my/path' }); - } catch (e) { - error = e; - } - }); - - it('should call in correct order', () => { - expect(interceptorCalls).toEqual([ - 'Request #3', - 'Request #2', - 'Request #1', - 'Response #1', - 'ResponseError #2', - 'ResponseError #3', - ]); - }); - - it('should make request', () => { - expect(fetchMock.called()).toBe(true); - }); - - it('should throw error', () => { - expect(error.message).toEqual('Error in response'); - }); - }); - - describe('when 
request interceptor throws; and a responseError interceptor resolves', () => { - let resp: any; - let interceptorCalls: string[]; - - beforeEach(async () => { - fetchMock.get('*', { foo: 'bar' }); - - interceptorCalls = mockInterceptorCalls([ - {}, - { - request: () => { - throw new Error('My request error'); - }, - responseError: () => { - return { custom: 'response' }; - }, - }, - {}, - ]); - - resp = await kfetch({ pathname: '/my/path' }); - }); - - it('should call in correct order', () => { - expect(interceptorCalls).toEqual([ - 'Request #3', - 'Request #2', - 'RequestError #1', - 'ResponseError #1', - 'ResponseError #2', - 'Response #3', - ]); - }); - - it('should not make request', () => { - expect(fetchMock.called()).toBe(false); - }); - - it('should resolve', () => { - expect(resp).toEqual({ custom: 'response' }); - }); - }); - - describe('when interceptors return synchronously', () => { - let resp: any; - beforeEach(async () => { - fetchMock.get('*', { foo: 'bar' }); - addInterceptor({ - request: (config) => ({ - ...config, - pathname: '/my/intercepted-route', - }), - response: (res) => ({ - ...res, - addedByResponseInterceptor: true, - }), - }); - - resp = await kfetch({ pathname: '/my/path' }); - }); - - it('should modify request', () => { - expect(fetchMock.lastUrl()).toContain('/my/intercepted-route'); - expect(fetchMock.lastOptions()!).toMatchObject({ - method: 'GET', - }); - }); - - it('should modify response', () => { - expect(resp).toEqual({ - addedByResponseInterceptor: true, - foo: 'bar', - }); - }); - }); - - describe('when interceptors return promise', () => { - let resp: any; - beforeEach(async () => { - fetchMock.get('*', { foo: 'bar' }); - addInterceptor({ - request: (config) => - Promise.resolve({ - ...config, - pathname: '/my/intercepted-route', - }), - response: (res) => - Promise.resolve({ - ...res, - addedByResponseInterceptor: true, - }), - }); - - resp = await kfetch({ pathname: '/my/path' }); - }); - - it('should modify request', () => { - expect(fetchMock.lastUrl()).toContain('/my/intercepted-route'); - expect(fetchMock.lastOptions()!).toMatchObject({ - method: 'GET', - }); - }); - - it('should modify response', () => { - expect(resp).toEqual({ - addedByResponseInterceptor: true, - foo: 'bar', - }); - }); - }); -}); - -function mockInterceptorCalls(interceptors: Interceptor[]) { - const interceptorCalls: string[] = []; - interceptors.forEach((interceptor, i) => { - addInterceptor({ - request: (config) => { - interceptorCalls.push(`Request #${i + 1}`); - - if (interceptor.request) { - return interceptor.request(config); - } - - return config; - }, - requestError: (e) => { - interceptorCalls.push(`RequestError #${i + 1}`); - if (interceptor.requestError) { - return interceptor.requestError(e); - } - - throw e; - }, - response: (res) => { - interceptorCalls.push(`Response #${i + 1}`); - - if (interceptor.response) { - return interceptor.response(res); - } - - return res; - }, - responseError: (e) => { - interceptorCalls.push(`ResponseError #${i + 1}`); - - if (interceptor.responseError) { - return interceptor.responseError(e); - } - - throw e; - }, - }); - }); - - return interceptorCalls; -} - -describe('withDefaultOptions', () => { - it('should remove undefined query params', () => { - const { query } = withDefaultOptions({ - pathname: '/withDefaultOptions', - query: { - foo: 'bar', - param1: (undefined as any) as string, - param2: (null as any) as string, - param3: '', - }, - }); - expect(query).toEqual({ foo: 'bar', param2: null, param3: '' }); - }); - - 
it('should add default options', () => { - expect(withDefaultOptions({ pathname: '/addDefaultOptions' })).toEqual({ - pathname: '/addDefaultOptions', - credentials: 'same-origin', - headers: { 'Content-Type': 'application/json' }, - method: 'GET', - }); - }); -}); diff --git a/src/legacy/ui/public/kfetch/kfetch.ts b/src/legacy/ui/public/kfetch/kfetch.ts deleted file mode 100644 index 4eb7149931575..0000000000000 --- a/src/legacy/ui/public/kfetch/kfetch.ts +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { merge } from 'lodash'; -// @ts-ignore not really worth typing -import { KFetchError } from './kfetch_error'; - -import { HttpSetup } from '../../../../core/public'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { HttpRequestInit } from '../../../../core/public/http/types'; - -export interface KFetchQuery { - [key: string]: string | number | boolean | undefined; -} - -export interface KFetchOptions extends HttpRequestInit { - pathname: string; - query?: KFetchQuery; - asSystemRequest?: boolean; -} - -export interface KFetchKibanaOptions { - prependBasePath?: boolean; -} - -export interface Interceptor { - request?: (config: KFetchOptions) => Promise | KFetchOptions; - requestError?: (e: any) => Promise | KFetchOptions; - response?: (res: any) => any; - responseError?: (e: any) => any; -} - -const interceptors: Interceptor[] = []; -export const resetInterceptors = () => (interceptors.length = 0); -export const addInterceptor = (interceptor: Interceptor) => interceptors.push(interceptor); - -export function createKfetch(http: HttpSetup) { - return function kfetch( - options: KFetchOptions, - { prependBasePath = true }: KFetchKibanaOptions = {} - ) { - return responseInterceptors( - requestInterceptors(withDefaultOptions(options)) - .then(({ pathname, ...restOptions }) => - http.fetch(pathname, { ...restOptions, prependBasePath }) - ) - .catch((err) => { - throw new KFetchError(err.response || { statusText: err.message }, err.body); - }) - ); - }; -} - -// Request/response interceptors are called in opposite orders. -// Request hooks start from the newest interceptor and end with the oldest. -function requestInterceptors(config: KFetchOptions): Promise { - return interceptors.reduceRight((acc, interceptor) => { - return acc.then(interceptor.request, interceptor.requestError); - }, Promise.resolve(config)); -} - -// Response hooks start from the oldest interceptor and end with the newest. 
-function responseInterceptors(responsePromise: Promise) { - return interceptors.reduce((acc, interceptor) => { - return acc.then(interceptor.response, interceptor.responseError); - }, responsePromise); -} - -export function withDefaultOptions(options?: KFetchOptions): KFetchOptions { - const withDefaults = merge( - { - method: 'GET', - credentials: 'same-origin', - headers: { - 'Content-Type': 'application/json', - }, - }, - options - ) as KFetchOptions; - - if ( - options && - options.headers && - 'Content-Type' in options.headers && - options.headers['Content-Type'] === undefined - ) { - // TS thinks headers could be undefined here, but that isn't possible because - // of the merge above. - // @ts-ignore - withDefaults.headers['Content-Type'] = undefined; - } - - return withDefaults; -} diff --git a/src/legacy/ui/public/kfetch/kfetch_error.ts b/src/legacy/ui/public/kfetch/kfetch_error.ts deleted file mode 100644 index f351959e624b8..0000000000000 --- a/src/legacy/ui/public/kfetch/kfetch_error.ts +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export class KFetchError extends Error { - constructor(public readonly res: Response, public readonly body?: any) { - super(res.statusText); - - // captureStackTrace is only available in the V8 engine, so any browser using - // a different JS engine won't have access to this method. - if (Error.captureStackTrace) { - Error.captureStackTrace(this, KFetchError); - } - } -} diff --git a/src/legacy/ui/public/legacy_compat/__tests__/xsrf.js b/src/legacy/ui/public/legacy_compat/__tests__/xsrf.js deleted file mode 100644 index efcfb77997265..0000000000000 --- a/src/legacy/ui/public/legacy_compat/__tests__/xsrf.js +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
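For reference, a minimal sketch of the interceptor ordering that the removed kfetch module documents above: request hooks run newest-first, response hooks oldest-first, and an older responseError hook can recover from a failure raised later in the chain. The 'ui/kfetch' import alias is an assumption:

// Illustrative sketch of the removed legacy API (import alias assumed).
import { kfetch, addInterceptor } from 'ui/kfetch';

addInterceptor({
  // Oldest interceptor: its request hook runs last, its response hooks run first.
  request: (config) => ({ ...config, query: { ...config.query, traced: true } }),
  responseError: (e) => {
    // Recover from an error thrown by a newer interceptor or by the fetch itself.
    return { recovered: true };
  },
});

addInterceptor({
  // Newest interceptor: its request hook runs first, its response hook runs last.
  request: (config) => config,
  response: (res) => res,
});

// GET {basePath}/my/path?traced=true with JSON headers and same-origin credentials.
kfetch({ pathname: '/my/path' }).then((body) => {
  // body is the parsed response, or { recovered: true } if the request failed.
});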
- */ - -import $ from 'jquery'; -import expect from '@kbn/expect'; -import sinon from 'sinon'; -import ngMock from 'ng_mock'; - -import { $setupXsrfRequestInterceptor } from '../../../../../plugins/kibana_legacy/public'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { version } from '../../../../../core/server/utils/package_json'; - -const xsrfHeader = 'kbn-version'; - -describe('chrome xsrf apis', function () { - const sandbox = sinon.createSandbox(); - - afterEach(function () { - sandbox.restore(); - }); - - describe('jQuery support', function () { - it('adds a global jQuery prefilter', function () { - sandbox.stub($, 'ajaxPrefilter'); - $setupXsrfRequestInterceptor(version); - expect($.ajaxPrefilter.callCount).to.be(1); - }); - - describe('jQuery prefilter', function () { - let prefilter; - - beforeEach(function () { - sandbox.stub($, 'ajaxPrefilter'); - $setupXsrfRequestInterceptor(version); - prefilter = $.ajaxPrefilter.args[0][0]; - }); - - it(`sets the ${xsrfHeader} header`, function () { - const setHeader = sinon.stub(); - prefilter({}, {}, { setRequestHeader: setHeader }); - - expect(setHeader.callCount).to.be(1); - expect(setHeader.args[0]).to.eql([xsrfHeader, version]); - }); - - it('can be canceled by setting the kbnXsrfToken option', function () { - const setHeader = sinon.stub(); - prefilter({ kbnXsrfToken: false }, {}, { setRequestHeader: setHeader }); - expect(setHeader.callCount).to.be(0); - }); - }); - - describe('Angular support', function () { - let $http; - let $httpBackend; - - beforeEach(function () { - sandbox.stub($, 'ajaxPrefilter'); - ngMock.module($setupXsrfRequestInterceptor(version)); - }); - - beforeEach( - ngMock.inject(function ($injector) { - $http = $injector.get('$http'); - $httpBackend = $injector.get('$httpBackend'); - - $httpBackend.when('POST', '/api/test').respond('ok'); - }) - ); - - afterEach(function () { - $httpBackend.verifyNoOutstandingExpectation(); - $httpBackend.verifyNoOutstandingRequest(); - }); - - it(`injects a ${xsrfHeader} header on every request`, function () { - $httpBackend - .expectPOST('/api/test', undefined, function (headers) { - return headers[xsrfHeader] === version; - }) - .respond(200, ''); - - $http.post('/api/test'); - $httpBackend.flush(); - }); - - it('skips requests with the kbnXsrfToken set falsy', function () { - $httpBackend - .expectPOST('/api/test', undefined, function (headers) { - return !(xsrfHeader in headers); - }) - .respond(200, ''); - - $http({ - method: 'POST', - url: '/api/test', - kbnXsrfToken: 0, - }); - - $http({ - method: 'POST', - url: '/api/test', - kbnXsrfToken: '', - }); - - $http({ - method: 'POST', - url: '/api/test', - kbnXsrfToken: false, - }); - - $httpBackend.flush(); - }); - - it('treats the kbnXsrfToken option as boolean-y', function () { - const customToken = `custom:${version}`; - $httpBackend - .expectPOST('/api/test', undefined, function (headers) { - return headers[xsrfHeader] === version; - }) - .respond(200, ''); - - $http({ - method: 'POST', - url: '/api/test', - kbnXsrfToken: customToken, - }); - - $httpBackend.flush(); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/legacy_compat/index.ts b/src/legacy/ui/public/legacy_compat/index.ts deleted file mode 100644 index 2067fa6489304..0000000000000 --- a/src/legacy/ui/public/legacy_compat/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { configureAppAngularModule } from '../../../../plugins/kibana_legacy/public'; diff --git a/src/legacy/ui/public/metadata.ts b/src/legacy/ui/public/metadata.ts deleted file mode 100644 index fade0f0d8629a..0000000000000 --- a/src/legacy/ui/public/metadata.ts +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { npSetup } from 'ui/new_platform'; - -export const metadata: { - branch: string; - version: string; -} = npSetup.core.injectedMetadata.getLegacyMetadata(); diff --git a/src/legacy/ui/public/modules.js b/src/legacy/ui/public/modules.js deleted file mode 100644 index bb1c8aead1c34..0000000000000 --- a/src/legacy/ui/public/modules.js +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import angular from 'angular'; -import _ from 'lodash'; -/** - * This module is used by Kibana to create and reuse angular modules. Angular modules - * can only be created once and need to have their dependencies at creation. This is - * hard/impossible to do in require.js since all of the dependencies for a module are - * loaded before it is. - * - * Here is an example: - * - * In the scenario below, require.js would load directive.js first because it is a - * dependency of app.js. 
This would cause the call to `angular.module('app')` to - * execute before the module is actually created. This causes angular to throw an - * error. This effect is magnified when app.js links off to many different modules. - * - * This is normally solved by creating unique modules per file, listed as the 1st - * alternate solution below. Unfortunately this solution would have required that - * we replicate our require statements. - * - * app.js - * ``` - * angular.module('app', ['ui.bootstrap']) - * .controller('AppController', function () { ... }); - * - * require('./directive'); - * ``` - * - * directive.js - * ``` - * angular.module('app') - * .directive('someDirective', function () { ... }); - * ``` - * - * Before taking this approach we saw three possible solutions: - * 1. replicate our js modules in angular modules/use a different module per file - * 2. create a single module outside of our js modules and share it - * 3. use a helper lib to dynamically create modules as needed. - * - * We decided to go with #3 - * - * This ends up working by creating a list of modules that the code base creates by - * calling `modules.get(name)` with different names, and then before bootstrapping - * the application kibana uses `modules.link()` to set the dependencies of the "kibana" - * module to include every defined module. This guarantees that kibana can always find - * any angular dependency defined in the kibana code base. This **also** means that - * Private modules are able to find any dependency, since they are injected using the - * "kibana" module's injector. - * - */ -const existingModules = {}; -const links = []; - -/** - * Take an angular module and extends the dependencies for that module to include all of the modules - * created using `ui/modules` - * - * @param {AngularModule} module - the module to extend - * @return {undefined} - */ -export function link(module) { - // as modules are defined they will be set as requirements for this app - links.push(module); - - // merge in the existing modules - module.requires = _.union(module.requires, _.keys(existingModules)); -} - -/** - * The primary means of interacting with `ui/modules`. Returns an angular module. If the module already - * exists the existing version will be returned. `dependencies` are either set as or merged into the - * modules total dependencies. - * - * This is in contrast to the `angular.module(name, [dependencies])` function which will only - * create a module if the `dependencies` list is passed and get an existing module if no dependencies - * are passed. This requires knowing the order that your files will load, which we can't guarantee. 
- * - * @param {string} moduleName - the unique name for this module - * @param {array[string]} [requires=[]] - the other modules this module requires - * @return {AngularModule} - */ -export function get(moduleName, requires) { - let module = existingModules[moduleName]; - - if (module === void 0) { - // create the module - module = existingModules[moduleName] = angular.module(moduleName, []); - - module.close = _.partial(close, moduleName); - - // ensure that it is required by linked modules - _.each(links, function (app) { - if (!~app.requires.indexOf(moduleName)) app.requires.push(moduleName); - }); - } - - if (requires) { - // update requires list with possibly new requirements - module.requires = _.union(module.requires, requires); - } - - return module; -} - -export function close(moduleName) { - const module = existingModules[moduleName]; - - // already closed - if (!module) return; - - // if the module is currently linked, unlink it - const i = links.indexOf(module); - if (i > -1) links.splice(i, 1); - - // remove from linked modules list of required modules - _.each(links, function (app) { - _.pull(app.requires, moduleName); - }); - - // remove module from existingModules - delete existingModules[moduleName]; -} - -export const uiModules = { link, get, close }; diff --git a/src/legacy/ui/public/new_platform/__mocks__/helpers.ts b/src/legacy/ui/public/new_platform/__mocks__/helpers.ts deleted file mode 100644 index 35aa8e4830428..0000000000000 --- a/src/legacy/ui/public/new_platform/__mocks__/helpers.ts +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
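For reference, a minimal sketch of the removed ui/modules registry whose rationale is spelled out in the long comment above: get() lazily creates or reuses an angular module, so individual files do not need to know load order. The 'ui/modules' import alias is an assumption:

// Illustrative sketch of the removed legacy API (import alias assumed).
import { uiModules } from 'ui/modules';

// First call creates the module; the dependency list is merged into module.requires.
const app = uiModules.get('app/example', ['ui.bootstrap']);
app.controller('ExampleController', function () { /* ... */ });

// A later file can retrieve the same module without caring which file loaded first.
uiModules.get('app/example').directive('exampleDirective', function () {
  return { restrict: 'E', template: '<span>example</span>' };
});

// close() unlinks the module and removes it from the registry.
uiModules.close('app/example');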
- */ - -import { coreMock } from '../../../../../core/public/mocks'; -import { dataPluginMock } from '../../../../../plugins/data/public/mocks'; -import { embeddablePluginMock } from '../../../../../plugins/embeddable/public/mocks'; -import { navigationPluginMock } from '../../../../../plugins/navigation/public/mocks'; -import { expressionsPluginMock } from '../../../../../plugins/expressions/public/mocks'; -import { inspectorPluginMock } from '../../../../../plugins/inspector/public/mocks'; -import { uiActionsPluginMock } from '../../../../../plugins/ui_actions/public/mocks'; -import { managementPluginMock } from '../../../../../plugins/management/public/mocks'; -import { usageCollectionPluginMock } from '../../../../../plugins/usage_collection/public/mocks'; -import { kibanaLegacyPluginMock } from '../../../../../plugins/kibana_legacy/public/mocks'; -import { chartPluginMock } from '../../../../../plugins/charts/public/mocks'; -import { advancedSettingsMock } from '../../../../../plugins/advanced_settings/public/mocks'; -import { savedObjectsManagementPluginMock } from '../../../../../plugins/saved_objects_management/public/mocks'; -import { visualizationsPluginMock } from '../../../../../plugins/visualizations/public/mocks'; -import { discoverPluginMock } from '../../../../../plugins/discover/public/mocks'; - -export const pluginsMock = { - createSetup: () => ({ - data: dataPluginMock.createSetupContract(), - charts: chartPluginMock.createSetupContract(), - navigation: navigationPluginMock.createSetupContract(), - embeddable: embeddablePluginMock.createSetupContract(), - inspector: inspectorPluginMock.createSetupContract(), - expressions: expressionsPluginMock.createSetupContract(), - uiActions: uiActionsPluginMock.createSetupContract(), - usageCollection: usageCollectionPluginMock.createSetupContract(), - advancedSettings: advancedSettingsMock.createSetupContract(), - visualizations: visualizationsPluginMock.createSetupContract(), - kibanaLegacy: kibanaLegacyPluginMock.createSetupContract(), - savedObjectsManagement: savedObjectsManagementPluginMock.createSetupContract(), - discover: discoverPluginMock.createSetupContract(), - }), - createStart: () => ({ - data: dataPluginMock.createStartContract(), - charts: chartPluginMock.createStartContract(), - navigation: navigationPluginMock.createStartContract(), - embeddable: embeddablePluginMock.createStartContract(), - inspector: inspectorPluginMock.createStartContract(), - expressions: expressionsPluginMock.createStartContract(), - uiActions: uiActionsPluginMock.createStartContract(), - management: managementPluginMock.createStartContract(), - advancedSettings: advancedSettingsMock.createStartContract(), - visualizations: visualizationsPluginMock.createStartContract(), - kibanaLegacy: kibanaLegacyPluginMock.createStartContract(), - savedObjectsManagement: savedObjectsManagementPluginMock.createStartContract(), - discover: discoverPluginMock.createStartContract(), - }), -}; - -export const createUiNewPlatformMock = () => { - const mock = { - npSetup: { - core: coreMock.createSetup(), - plugins: pluginsMock.createSetup(), - }, - npStart: { - core: coreMock.createStart(), - plugins: pluginsMock.createStart(), - }, - }; - return mock; -}; diff --git a/src/legacy/ui/public/new_platform/__mocks__/index.ts b/src/legacy/ui/public/new_platform/__mocks__/index.ts deleted file mode 100644 index d469960ddd7b7..0000000000000 --- a/src/legacy/ui/public/new_platform/__mocks__/index.ts +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. 
under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { createUiNewPlatformMock } from './helpers'; - -const { npSetup, npStart } = createUiNewPlatformMock(); -export { npSetup, npStart }; diff --git a/src/legacy/ui/public/new_platform/index.ts b/src/legacy/ui/public/new_platform/index.ts deleted file mode 100644 index ca5890854f3aa..0000000000000 --- a/src/legacy/ui/public/new_platform/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -export { __setup__, __start__, npSetup, npStart } from './new_platform'; diff --git a/src/legacy/ui/public/new_platform/new_platform.karma_mock.js b/src/legacy/ui/public/new_platform/new_platform.karma_mock.js deleted file mode 100644 index b8d48b784dba7..0000000000000 --- a/src/legacy/ui/public/new_platform/new_platform.karma_mock.js +++ /dev/null @@ -1,550 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import sinon from 'sinon'; -import { getFieldFormatsRegistry } from '../../../../test_utils/public/stub_field_formats'; -import { METRIC_TYPE } from '@kbn/analytics'; -import { setSetupServices, setStartServices } from './set_services'; -import { - AggTypesRegistry, - getAggTypes, - AggConfigs, - // eslint-disable-next-line @kbn/eslint/no-restricted-paths -} from '../../../../../src/plugins/data/common/search/aggs'; -import { ComponentRegistry } from '../../../../../src/plugins/advanced_settings/public/'; -import { UI_SETTINGS } from '../../../../../src/plugins/data/public/'; -import { - CSV_SEPARATOR_SETTING, - CSV_QUOTE_VALUES_SETTING, -} from '../../../../../src/plugins/share/public'; - -const mockObservable = () => { - return { - subscribe: () => {}, - pipe: () => { - return { - subscribe: () => {}, - }; - }, - }; -}; - -const mockComponent = () => { - return null; -}; - -let refreshInterval = undefined; -let isTimeRangeSelectorEnabled = true; -let isAutoRefreshSelectorEnabled = true; - -export const mockUiSettings = { - get: (item, defaultValue) => { - const defaultValues = { - dateFormat: 'MMM D, YYYY @ HH:mm:ss.SSS', - 'dateFormat:tz': 'UTC', - [UI_SETTINGS.SHORT_DOTS_ENABLE]: true, - [UI_SETTINGS.COURIER_IGNORE_FILTER_IF_FIELD_NOT_IN_INDEX]: true, - [UI_SETTINGS.QUERY_ALLOW_LEADING_WILDCARDS]: true, - [UI_SETTINGS.QUERY_STRING_OPTIONS]: {}, - [UI_SETTINGS.FORMAT_CURRENCY_DEFAULT_PATTERN]: '($0,0.[00])', - [UI_SETTINGS.FORMAT_NUMBER_DEFAULT_PATTERN]: '0,0.[000]', - [UI_SETTINGS.FORMAT_PERCENT_DEFAULT_PATTERN]: '0,0.[000]%', - [UI_SETTINGS.FORMAT_NUMBER_DEFAULT_LOCALE]: 'en', - [UI_SETTINGS.FORMAT_DEFAULT_TYPE_MAP]: {}, - [CSV_SEPARATOR_SETTING]: ',', - [CSV_QUOTE_VALUES_SETTING]: true, - [UI_SETTINGS.SEARCH_QUERY_LANGUAGE]: 'kuery', - 'state:storeInSessionStorage': false, - }; - - return defaultValues[item] || defaultValue; - }, - getUpdate$: () => ({ - subscribe: sinon.fake(), - }), - isDefault: sinon.fake(), -}; - -const mockCoreSetup = { - chrome: {}, - http: { - basePath: { - get: sinon.fake.returns(''), - }, - }, - injectedMetadata: {}, - uiSettings: mockUiSettings, -}; - -const mockCoreStart = { - application: { - capabilities: {}, - }, - chrome: { - overlays: { - openModal: sinon.fake(), - }, - }, - http: { - basePath: { - get: sinon.fake.returns(''), - }, - }, - notifications: { - toasts: {}, - }, - i18n: {}, - overlays: {}, - savedObjects: { - client: {}, - }, - uiSettings: mockUiSettings, -}; - -const querySetup = { - state$: mockObservable(), - filterManager: { - getFetches$: sinon.fake(), - getFilters: sinon.fake(), - getAppFilters: sinon.fake(), - getGlobalFilters: sinon.fake(), - removeFilter: sinon.fake(), - addFilters: sinon.fake(), - setFilters: sinon.fake(), - removeAll: sinon.fake(), - getUpdates$: mockObservable, - }, - timefilter: { - timefilter: { - getFetch$: mockObservable, - getAutoRefreshFetch$: mockObservable, - getEnabledUpdated$: mockObservable, - getTimeUpdate$: mockObservable, - getRefreshIntervalUpdate$: mockObservable, - isTimeRangeSelectorEnabled: () => { - return isTimeRangeSelectorEnabled; - }, - isAutoRefreshSelectorEnabled: () => { - return isAutoRefreshSelectorEnabled; - }, - disableAutoRefreshSelector: () => { - isAutoRefreshSelectorEnabled = false; - }, - enableAutoRefreshSelector: () => { - isAutoRefreshSelectorEnabled = true; - }, - getRefreshInterval: () => { - return refreshInterval; - }, - setRefreshInterval: (interval) => { - refreshInterval = interval; - }, - enableTimeRangeSelector: () => { - isTimeRangeSelectorEnabled = true; 
- }, - disableTimeRangeSelector: () => { - isTimeRangeSelectorEnabled = false; - }, - getTime: sinon.fake(), - setTime: sinon.fake(), - getActiveBounds: sinon.fake(), - getBounds: sinon.fake(), - calculateBounds: sinon.fake(), - createFilter: sinon.fake(), - }, - history: sinon.fake(), - }, - savedQueries: { - saveQuery: sinon.fake(), - getAllSavedQueries: sinon.fake(), - findSavedQueries: sinon.fake(), - getSavedQuery: sinon.fake(), - deleteSavedQuery: sinon.fake(), - getSavedQueryCount: sinon.fake(), - }, -}; - -const mockAggTypesRegistry = () => { - const registry = new AggTypesRegistry(); - const registrySetup = registry.setup(); - const aggTypes = getAggTypes({ - calculateBounds: sinon.fake(), - getConfig: sinon.fake(), - getFieldFormatsStart: () => ({ - deserialize: sinon.fake(), - getDefaultInstance: sinon.fake(), - }), - isDefaultTimezone: () => true, - }); - aggTypes.buckets.forEach((type) => registrySetup.registerBucket(type)); - aggTypes.metrics.forEach((type) => registrySetup.registerMetric(type)); - - return registry; -}; - -const aggTypesRegistry = mockAggTypesRegistry(); - -export const npSetup = { - core: mockCoreSetup, - plugins: { - advancedSettings: { - component: { - register: sinon.fake(), - componentType: ComponentRegistry.componentType, - }, - }, - usageCollection: { - allowTrackUserAgent: sinon.fake(), - reportUiStats: sinon.fake(), - METRIC_TYPE, - }, - embeddable: { - registerEmbeddableFactory: sinon.fake(), - }, - expressions: { - registerFunction: sinon.fake(), - registerRenderer: sinon.fake(), - registerType: sinon.fake(), - }, - data: { - autocomplete: { - addProvider: sinon.fake(), - getProvider: sinon.fake(), - }, - query: querySetup, - search: { - aggs: { - types: aggTypesRegistry.setup(), - }, - __LEGACY: { - esClient: { - search: sinon.fake(), - msearch: sinon.fake(), - }, - }, - }, - fieldFormats: getFieldFormatsRegistry(mockCoreSetup), - }, - share: { - register: () => {}, - urlGenerators: { - registerUrlGenerator: () => {}, - }, - }, - devTools: { - register: () => {}, - }, - kibanaLegacy: { - registerLegacyApp: () => {}, - forwardApp: () => {}, - config: { - defaultAppId: 'home', - }, - }, - inspector: { - registerView: () => undefined, - __LEGACY: { - views: { - register: () => undefined, - }, - }, - }, - uiActions: { - attachAction: sinon.fake(), - registerAction: sinon.fake(), - registerTrigger: sinon.fake(), - }, - home: { - featureCatalogue: { - register: sinon.fake(), - }, - environment: { - update: sinon.fake(), - }, - config: { - disableWelcomeScreen: false, - }, - tutorials: { - setVariable: sinon.fake(), - }, - }, - charts: { - theme: { - chartsTheme$: mockObservable, - useChartsTheme: sinon.fake(), - }, - colors: { - seedColors: ['white', 'black'], - }, - }, - management: { - sections: { - getSection: () => ({ - registerApp: sinon.fake(), - }), - }, - }, - indexPatternManagement: { - list: { addListConfig: sinon.fake() }, - creation: { addCreationConfig: sinon.fake() }, - }, - discover: { - docViews: { - addDocView: sinon.fake(), - setAngularInjectorGetter: sinon.fake(), - }, - }, - visTypeVega: { - config: sinon.fake(), - }, - visualizations: { - createBaseVisualization: sinon.fake(), - createReactVisualization: sinon.fake(), - registerAlias: sinon.fake(), - hideTypes: sinon.fake(), - }, - - mapsLegacy: { - serviceSettings: sinon.fake(), - getPrecision: sinon.fake(), - getZoomPrecision: sinon.fake(), - }, - }, -}; - -export const npStart = { - core: mockCoreStart, - plugins: { - management: { - legacy: { - getSection: () => ({ - register: 
sinon.fake(), - deregister: sinon.fake(), - hasItem: sinon.fake(), - }), - }, - sections: { - getSection: () => ({ - registerApp: sinon.fake(), - }), - }, - }, - indexPatternManagement: { - list: { - getType: sinon.fake(), - getIndexPatternCreationOptions: sinon.fake(), - }, - creation: { - getIndexPatternTags: sinon.fake(), - getFieldInfo: sinon.fake(), - areScriptedFieldsEnabled: sinon.fake(), - }, - }, - embeddable: { - getEmbeddableFactory: sinon.fake(), - getEmbeddableFactories: sinon.fake(), - registerEmbeddableFactory: sinon.fake(), - }, - expressions: { - registerFunction: sinon.fake(), - registerRenderer: sinon.fake(), - registerType: sinon.fake(), - }, - kibanaLegacy: { - getForwards: () => [], - loadFontAwesome: () => {}, - config: { - defaultAppId: 'home', - }, - dashboardConfig: { - turnHideWriteControlsOn: sinon.fake(), - getHideWriteControls: sinon.fake(), - }, - }, - dashboard: { - getSavedDashboardLoader: sinon.fake(), - }, - data: { - actions: { - createFiltersFromValueClickAction: Promise.resolve(['yes']), - createFiltersFromRangeSelectAction: sinon.fake(), - }, - autocomplete: { - getProvider: sinon.fake(), - }, - getSuggestions: sinon.fake(), - indexPatterns: { - get: sinon.spy((indexPatternId) => - Promise.resolve({ - id: indexPatternId, - isTimeNanosBased: () => false, - popularizeField: () => {}, - }) - ), - }, - ui: { - IndexPatternSelect: mockComponent, - SearchBar: mockComponent, - }, - query: { - filterManager: { - getFetches$: sinon.fake(), - getFilters: sinon.fake(), - getAppFilters: sinon.fake(), - getGlobalFilters: sinon.fake(), - removeFilter: sinon.fake(), - addFilters: sinon.fake(), - setFilters: sinon.fake(), - removeAll: sinon.fake(), - getUpdates$: mockObservable, - }, - timefilter: { - timefilter: { - getFetch$: mockObservable, - getAutoRefreshFetch$: mockObservable, - getEnabledUpdated$: mockObservable, - getTimeUpdate$: mockObservable, - getRefreshIntervalUpdate$: mockObservable, - isTimeRangeSelectorEnabled: () => { - return isTimeRangeSelectorEnabled; - }, - isAutoRefreshSelectorEnabled: () => { - return isAutoRefreshSelectorEnabled; - }, - disableAutoRefreshSelector: () => { - isAutoRefreshSelectorEnabled = false; - }, - enableAutoRefreshSelector: () => { - isAutoRefreshSelectorEnabled = true; - }, - getRefreshInterval: () => { - return refreshInterval; - }, - setRefreshInterval: (interval) => { - refreshInterval = interval; - }, - enableTimeRangeSelector: () => { - isTimeRangeSelectorEnabled = true; - }, - disableTimeRangeSelector: () => { - isTimeRangeSelectorEnabled = false; - }, - getTime: sinon.fake(), - setTime: sinon.fake(), - getActiveBounds: sinon.fake(), - getBounds: sinon.fake(), - calculateBounds: sinon.fake(), - createFilter: sinon.fake(), - }, - history: sinon.fake(), - }, - }, - search: { - aggs: { - calculateAutoTimeExpression: sinon.fake(), - createAggConfigs: (indexPattern, configStates = []) => { - return new AggConfigs(indexPattern, configStates, { - typesRegistry: aggTypesRegistry.start(), - fieldFormats: getFieldFormatsRegistry(mockCoreStart), - }); - }, - types: aggTypesRegistry.start(), - }, - __LEGACY: { - esClient: { - search: sinon.fake(), - msearch: sinon.fake(), - }, - }, - }, - fieldFormats: getFieldFormatsRegistry(mockCoreStart), - }, - share: { - toggleShareContextMenu: () => {}, - }, - inspector: { - isAvailable: () => false, - open: () => ({ - onClose: Promise.resolve(undefined), - close: () => Promise.resolve(undefined), - }), - }, - uiActions: { - attachAction: sinon.fake(), - registerAction: sinon.fake(), - 
registerTrigger: sinon.fake(), - detachAction: sinon.fake(), - executeTriggerActions: sinon.fake(), - getTrigger: sinon.fake(), - getTriggerActions: sinon.fake(), - getTriggerCompatibleActions: sinon.fake(), - }, - visualizations: { - get: sinon.fake(), - all: sinon.fake(), - getAliases: sinon.fake(), - savedVisualizationsLoader: {}, - showNewVisModal: sinon.fake(), - createVis: sinon.fake(), - convertFromSerializedVis: sinon.fake(), - convertToSerializedVis: sinon.fake(), - }, - navigation: { - ui: { - TopNavMenu: mockComponent, - }, - }, - charts: { - theme: { - chartsTheme$: mockObservable, - useChartsTheme: sinon.fake(), - }, - }, - discover: { - docViews: { - DocViewer: () => null, - }, - savedSearchLoader: {}, - }, - }, -}; - -export function __setup__(coreSetup) { - npSetup.core = coreSetup; - - // no-op application register calls (this is overwritten to - // bootstrap an LP plugin outside of tests) - npSetup.core.application.register = () => {}; - - npSetup.core.uiSettings.get = mockUiSettings.get; - - // Services that need to be set in the legacy platform since the legacy data - // & vis plugins which previously provided them have been removed. - setSetupServices(npSetup); -} - -export function __start__(coreStart) { - npStart.core = coreStart; - - npStart.core.uiSettings.get = mockUiSettings.get; - - // Services that need to be set in the legacy platform since the legacy data - // & vis plugins which previously provided them have been removed. - setStartServices(npStart); -} diff --git a/src/legacy/ui/public/new_platform/new_platform.test.mocks.ts b/src/legacy/ui/public/new_platform/new_platform.test.mocks.ts deleted file mode 100644 index f44efe17ef8ee..0000000000000 --- a/src/legacy/ui/public/new_platform/new_platform.test.mocks.ts +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { scopedHistoryMock } from '../../../../core/public/mocks'; - -export const setRootControllerMock = jest.fn(); - -jest.doMock('ui/chrome', () => ({ - setRootController: setRootControllerMock, -})); - -export const historyMock = scopedHistoryMock.create(); -jest.doMock('../../../../core/public', () => ({ - ScopedHistory: jest.fn(() => historyMock), -})); diff --git a/src/legacy/ui/public/new_platform/new_platform.test.ts b/src/legacy/ui/public/new_platform/new_platform.test.ts deleted file mode 100644 index d515c348ca440..0000000000000 --- a/src/legacy/ui/public/new_platform/new_platform.test.ts +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -jest.mock('history'); - -import { setRootControllerMock, historyMock } from './new_platform.test.mocks'; -import { legacyAppRegister, __reset__, __setup__, __start__ } from './new_platform'; -import { coreMock } from '../../../../core/public/mocks'; -import { AppMount } from '../../../../core/public'; - -describe('ui/new_platform', () => { - describe('legacyAppRegister', () => { - beforeEach(() => { - setRootControllerMock.mockReset(); - __reset__(); - __setup__(coreMock.createSetup({ basePath: '/test/base/path' }) as any, {} as any); - }); - - const registerApp = () => { - const unmountMock = jest.fn(); - const mountMock = jest.fn, Parameters>(() => unmountMock); - legacyAppRegister({ - id: 'test', - title: 'Test', - mount: mountMock, - }); - return { mountMock, unmountMock }; - }; - - test('sets ui/chrome root controller', () => { - registerApp(); - expect(setRootControllerMock).toHaveBeenCalledWith('test', expect.any(Function)); - }); - - test('throws if called more than once', () => { - registerApp(); - expect(registerApp).toThrowErrorMatchingInlineSnapshot( - `"core.application.register may only be called once for legacy plugins."` - ); - }); - - test('controller calls app.mount when invoked', () => { - const { mountMock } = registerApp(); - const controller = setRootControllerMock.mock.calls[0][1]; - const scopeMock = { $on: jest.fn() }; - const elementMock = [document.createElement('div')]; - - controller(scopeMock, elementMock); - expect(mountMock).toHaveBeenCalledWith({ - element: expect.any(HTMLElement), - appBasePath: '/test/base/path/app/test', - onAppLeave: expect.any(Function), - history: historyMock, - }); - }); - - test('app is mounted in new div inside containing element', () => { - const { mountMock } = registerApp(); - const controller = setRootControllerMock.mock.calls[0][1]; - const scopeMock = { $on: jest.fn() }; - const elementMock = [document.createElement('div')]; - - controller(scopeMock, elementMock); - - const { element } = mountMock.mock.calls[0][0]; - expect(element.parentElement).toEqual(elementMock[0]); - }); - - test('controller calls deprecated context app.mount when invoked', () => { - const unmountMock = jest.fn(); - // Two arguments changes how this is called. 
- const mountMock = jest.fn((context, params) => unmountMock); - legacyAppRegister({ - id: 'test', - title: 'Test', - mount: mountMock, - }); - const controller = setRootControllerMock.mock.calls[0][1]; - const scopeMock = { $on: jest.fn() }; - const elementMock = [document.createElement('div')]; - - controller(scopeMock, elementMock); - expect(mountMock).toHaveBeenCalledWith(expect.any(Object), { - element: expect.any(HTMLElement), - appBasePath: '/test/base/path/app/test', - onAppLeave: expect.any(Function), - history: historyMock, - }); - }); - - test('controller calls unmount when $scope.$destroy', async () => { - const { unmountMock } = registerApp(); - const controller = setRootControllerMock.mock.calls[0][1]; - const scopeMock = { $on: jest.fn() }; - const elementMock = [document.createElement('div')]; - - controller(scopeMock, elementMock); - // Flush promise queue. Must be done this way because the controller cannot return a Promise without breaking - // angular. - await new Promise((resolve) => setTimeout(resolve, 1)); - - const [event, eventHandler] = scopeMock.$on.mock.calls[0]; - expect(event).toEqual('$destroy'); - eventHandler(); - expect(unmountMock).toHaveBeenCalled(); - }); - }); -}); diff --git a/src/legacy/ui/public/new_platform/new_platform.ts b/src/legacy/ui/public/new_platform/new_platform.ts deleted file mode 100644 index 37787ffbde4fc..0000000000000 --- a/src/legacy/ui/public/new_platform/new_platform.ts +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { IScope } from 'angular'; - -import { UiActionsStart, UiActionsSetup } from 'src/plugins/ui_actions/public'; -import { EmbeddableStart, EmbeddableSetup } from 'src/plugins/embeddable/public'; -import { createBrowserHistory } from 'history'; -import { VisTypeXyPluginSetup } from 'src/plugins/vis_type_xy/public'; -import { DashboardStart } from '../../../../plugins/dashboard/public'; -import { setSetupServices, setStartServices } from './set_services'; -import { - LegacyCoreSetup, - LegacyCoreStart, - App, - AppMountDeprecated, - ScopedHistory, -} from '../../../../core/public'; -import { Plugin as DataPlugin } from '../../../../plugins/data/public'; -import { Plugin as ExpressionsPlugin } from '../../../../plugins/expressions/public'; -import { - Setup as InspectorSetup, - Start as InspectorStart, -} from '../../../../plugins/inspector/public'; -import { ChartsPluginSetup, ChartsPluginStart } from '../../../../plugins/charts/public'; -import { DevToolsSetup } from '../../../../plugins/dev_tools/public'; -import { KibanaLegacySetup, KibanaLegacyStart } from '../../../../plugins/kibana_legacy/public'; -import { HomePublicPluginSetup } from '../../../../plugins/home/public'; -import { SharePluginSetup, SharePluginStart } from '../../../../plugins/share/public'; -import { - AdvancedSettingsSetup, - AdvancedSettingsStart, -} from '../../../../plugins/advanced_settings/public'; -import { ManagementSetup, ManagementStart } from '../../../../plugins/management/public'; -import { - IndexPatternManagementSetup, - IndexPatternManagementStart, -} from '../../../../plugins/index_pattern_management/public'; -import { BfetchPublicSetup, BfetchPublicStart } from '../../../../plugins/bfetch/public'; -import { UsageCollectionSetup } from '../../../../plugins/usage_collection/public'; -import { TelemetryPluginSetup, TelemetryPluginStart } from '../../../../plugins/telemetry/public'; -import { - NavigationPublicPluginSetup, - NavigationPublicPluginStart, -} from '../../../../plugins/navigation/public'; -import { DiscoverSetup, DiscoverStart } from '../../../../plugins/discover/public'; -import { - SavedObjectsManagementPluginSetup, - SavedObjectsManagementPluginStart, -} from '../../../../plugins/saved_objects_management/public'; -import { - VisualizationsSetup, - VisualizationsStart, -} from '../../../../plugins/visualizations/public'; -import { VisTypeTimelionPluginStart } from '../../../../plugins/vis_type_timelion/public'; -import { MapsLegacyPluginSetup } from '../../../../plugins/maps_legacy/public'; - -export interface PluginsSetup { - bfetch: BfetchPublicSetup; - charts: ChartsPluginSetup; - data: ReturnType; - embeddable: EmbeddableSetup; - expressions: ReturnType; - home: HomePublicPluginSetup; - inspector: InspectorSetup; - uiActions: UiActionsSetup; - navigation: NavigationPublicPluginSetup; - devTools: DevToolsSetup; - kibanaLegacy: KibanaLegacySetup; - share: SharePluginSetup; - usageCollection: UsageCollectionSetup; - advancedSettings: AdvancedSettingsSetup; - management: ManagementSetup; - discover: DiscoverSetup; - visualizations: VisualizationsSetup; - telemetry?: TelemetryPluginSetup; - savedObjectsManagement: SavedObjectsManagementPluginSetup; - mapsLegacy: MapsLegacyPluginSetup; - indexPatternManagement: IndexPatternManagementSetup; - visTypeXy?: VisTypeXyPluginSetup; -} - -export interface PluginsStart { - bfetch: BfetchPublicStart; - charts: ChartsPluginStart; - data: ReturnType; - embeddable: EmbeddableStart; - expressions: ReturnType; - inspector: InspectorStart; - 
uiActions: UiActionsStart; - navigation: NavigationPublicPluginStart; - kibanaLegacy: KibanaLegacyStart; - share: SharePluginStart; - management: ManagementStart; - advancedSettings: AdvancedSettingsStart; - discover: DiscoverStart; - visualizations: VisualizationsStart; - telemetry?: TelemetryPluginStart; - dashboard: DashboardStart; - savedObjectsManagement: SavedObjectsManagementPluginStart; - visTypeTimelion: VisTypeTimelionPluginStart; - indexPatternManagement: IndexPatternManagementStart; -} - -export const npSetup = { - core: (null as unknown) as LegacyCoreSetup, - plugins: {} as PluginsSetup, -}; - -export const npStart = { - core: (null as unknown) as LegacyCoreStart, - plugins: {} as PluginsStart, -}; - -/** - * Only used by unit tests - * @internal - */ -export function __reset__() { - npSetup.core = (null as unknown) as LegacyCoreSetup; - npSetup.plugins = {} as any; - npStart.core = (null as unknown) as LegacyCoreStart; - npStart.plugins = {} as any; - legacyAppRegistered = false; -} - -export function __setup__(coreSetup: LegacyCoreSetup, plugins: PluginsSetup) { - npSetup.core = coreSetup; - npSetup.plugins = plugins; - - // Setup compatibility layer for AppService in legacy platform - npSetup.core.application.register = legacyAppRegister; - - // Services that need to be set in the legacy platform since the legacy data - // & vis plugins which previously provided them have been removed. - setSetupServices(npSetup); -} - -export function __start__(coreStart: LegacyCoreStart, plugins: PluginsStart) { - npStart.core = coreStart; - npStart.plugins = plugins; - - // Services that need to be set in the legacy platform since the legacy data - // & vis plugins which previously provided them have been removed. - setStartServices(npStart); -} - -/** Flag used to ensure `legacyAppRegister` is only called once. */ -let legacyAppRegistered = false; - -/** - * Exported for testing only. Use `npSetup.core.application.register` in legacy apps. - * @internal - */ -export const legacyAppRegister = (app: App) => { - if (legacyAppRegistered) { - throw new Error(`core.application.register may only be called once for legacy plugins.`); - } - legacyAppRegistered = true; - - // eslint-disable-next-line @typescript-eslint/no-var-requires - require('ui/chrome').setRootController(app.id, ($scope: IScope, $element: JQLite) => { - const element = document.createElement('div'); - $element[0].appendChild(element); - - // Root controller cannot return a Promise so use an internal async function and call it immediately - (async () => { - const appRoute = app.appRoute || `/app/${app.id}`; - const appBasePath = npSetup.core.http.basePath.prepend(appRoute); - const params = { - element, - appBasePath, - history: new ScopedHistory( - createBrowserHistory({ basename: npSetup.core.http.basePath.get() }), - appRoute - ), - onAppLeave: () => undefined, - }; - const unmount = isAppMountDeprecated(app.mount) - ? await app.mount({ core: npStart.core }, params) - : await app.mount(params); - $scope.$on('$destroy', () => { - unmount(); - }); - })(); - }); -}; - -function isAppMountDeprecated(mount: (...args: any[]) => any): mount is AppMountDeprecated { - // Mount functions with two arguments are assumed to expect deprecated `context` object. 
- return mount.length === 2; -} diff --git a/src/legacy/ui/public/new_platform/set_services.test.ts b/src/legacy/ui/public/new_platform/set_services.test.ts deleted file mode 100644 index 74e789ec220b1..0000000000000 --- a/src/legacy/ui/public/new_platform/set_services.test.ts +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { __reset__, __setup__, __start__, PluginsSetup, PluginsStart } from './new_platform'; -import * as dataServices from '../../../../plugins/data/public/services'; -import * as visualizationsServices from '../../../../plugins/visualizations/public/services'; -import { LegacyCoreSetup, LegacyCoreStart } from '../../../../core/public'; -import { coreMock } from '../../../../core/public/mocks'; -import { npSetup, npStart } from './__mocks__'; - -describe('ui/new_platform', () => { - describe('set service getters', () => { - const testServiceGetters = (name: string, services: Record) => { - const getters = Object.keys(services).filter((k) => k.substring(0, 3) === 'get'); - getters.forEach((g) => { - it(`ui/new_platform sets a value for ${name} getter ${g}`, () => { - __reset__(); - __setup__( - (coreMock.createSetup() as unknown) as LegacyCoreSetup, - (npSetup.plugins as unknown) as PluginsSetup - ); - __start__( - (coreMock.createStart() as unknown) as LegacyCoreStart, - (npStart.plugins as unknown) as PluginsStart - ); - - expect(services[g]()).toBeDefined(); - }); - }); - }; - - testServiceGetters('data', dataServices); - testServiceGetters('visualizations', visualizationsServices); - }); -}); diff --git a/src/legacy/ui/public/new_platform/set_services.ts b/src/legacy/ui/public/new_platform/set_services.ts deleted file mode 100644 index 036157a9f3fbc..0000000000000 --- a/src/legacy/ui/public/new_platform/set_services.ts +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
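The deleted `new_platform.ts` above aliases `core.application.register` to `legacyAppRegister`, which mounts a legacy app through `ui/chrome`'s root controller and tears it down on the Angular scope's `$destroy`. As a rough editorial illustration of the consumer side (not part of the diff), here is a minimal sketch of how a legacy plugin would have registered an app against that shim; the app id and mount body are hypothetical, and the import path assumes the legacy `ui/new_platform` alias.

```js
import { npSetup } from 'ui/new_platform';

npSetup.core.application.register({
  id: 'myLegacyApp', // hypothetical id; the shim serves it under the /app/myLegacyApp route
  title: 'My Legacy App',
  mount: (params) => {
    // params.element is a fresh div appended inside the root controller's element
    params.element.textContent = 'Hello from a legacy app';
    // the returned unmount callback is invoked when the Angular scope is destroyed
    return () => {
      params.element.textContent = '';
    };
  },
});
```

Note that the shim allows exactly one such registration per legacy plugin; a second call throws, as the deleted tests above verify.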
- */ - -import { pick } from 'lodash'; - -import { PluginsSetup, PluginsStart } from './new_platform'; -import { LegacyCoreSetup, LegacyCoreStart } from '../../../../core/public'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import * as dataServices from '../../../../plugins/data/public/services'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import * as visualizationsServices from '../../../../plugins/visualizations/public/services'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { createSavedVisLoader } from '../../../../plugins/visualizations/public/saved_visualizations/saved_visualizations'; - -interface NpSetup { - core: LegacyCoreSetup; - plugins: PluginsSetup; -} - -interface NpStart { - core: LegacyCoreStart; - plugins: PluginsStart; -} - -export function setSetupServices(npSetup: NpSetup) { - // Services that need to be set in the legacy platform since the legacy data plugin - // which previously provided them has been removed. - visualizationsServices.setUISettings(npSetup.core.uiSettings); - visualizationsServices.setUsageCollector(npSetup.plugins.usageCollection); -} - -export function setStartServices(npStart: NpStart) { - // Services that need to be set in the legacy platform since the legacy data plugin - // which previously provided them has been removed. - dataServices.setNotifications(npStart.core.notifications); - dataServices.setOverlays(npStart.core.overlays); - dataServices.setUiSettings(npStart.core.uiSettings); - dataServices.setFieldFormats(npStart.plugins.data.fieldFormats); - dataServices.setIndexPatterns(npStart.plugins.data.indexPatterns); - dataServices.setQueryService(npStart.plugins.data.query); - dataServices.setSearchService(npStart.plugins.data.search); - - visualizationsServices.setI18n(npStart.core.i18n); - visualizationsServices.setTypes( - pick(npStart.plugins.visualizations, ['get', 'all', 'getAliases']) - ); - visualizationsServices.setCapabilities(npStart.core.application.capabilities); - visualizationsServices.setHttp(npStart.core.http); - visualizationsServices.setApplication(npStart.core.application); - visualizationsServices.setEmbeddable(npStart.plugins.embeddable); - visualizationsServices.setSavedObjects(npStart.core.savedObjects); - visualizationsServices.setIndexPatterns(npStart.plugins.data.indexPatterns); - visualizationsServices.setFilterManager(npStart.plugins.data.query.filterManager); - visualizationsServices.setExpressions(npStart.plugins.expressions); - visualizationsServices.setUiActions(npStart.plugins.uiActions); - visualizationsServices.setTimeFilter(npStart.plugins.data.query.timefilter.timefilter); - visualizationsServices.setAggs(npStart.plugins.data.search.aggs); - visualizationsServices.setOverlays(npStart.core.overlays); - visualizationsServices.setChrome(npStart.core.chrome); - visualizationsServices.setSearch(npStart.plugins.data.search); - const savedVisualizationsLoader = createSavedVisLoader({ - savedObjectsClient: npStart.core.savedObjects.client, - indexPatterns: npStart.plugins.data.indexPatterns, - search: npStart.plugins.data.search, - chrome: npStart.core.chrome, - overlays: npStart.core.overlays, - visualizationTypes: visualizationsServices.getTypes(), - }); - visualizationsServices.setSavedVisualizationsLoader(savedVisualizationsLoader); - visualizationsServices.setSavedSearchLoader(npStart.plugins.discover.savedSearchLoader); -} diff --git a/src/legacy/ui/public/notify/banners/BANNERS.md b/src/legacy/ui/public/notify/banners/BANNERS.md deleted file mode 
100644 index fc6bddc3aa4ed..0000000000000 --- a/src/legacy/ui/public/notify/banners/BANNERS.md +++ /dev/null @@ -1,360 +0,0 @@ -# Banners - -Use this service to surface banners at the top of the screen. The expectation is that the banner will used an -`` to render, but that is not a requirement. See [the EUI docs](https://elastic.github.io/eui/) for -more information on banners and their role within the UI. - -Banners should be considered with respect to their lifecycle. Most banners are best served by using the `add` and -`remove` functions. - -## Importing the module - -```js -import { banners } from 'ui/notify'; -``` - -## Interface - -There are three methods defined to manipulate the list of banners: `add`, `set`, and `remove`. A fourth method, -`onChange` exists to listen to changes made via `add`, `set`, and `remove`. - -### `add()` - -This is the preferred way to add banners because it implies the best usage of the banner: added once during a page's -lifecycle. For other usages, consider *not* using a banner. - -#### Syntax - -```js -const bannerId = banners.add({ - // required: - component, - // optional: - priority, -}); -``` - -##### Parameters - -| Field | Type | Description | -|-------|------|-------------| -| `component` | Any | The value displayed as the banner. | -| `priority` | Number | Optional priority, which defaults to `0` used to place the banner. | - -To add a banner, you only need to define the `component` field. - -The `priority` sorts in descending order. Items sharing the same priority are sorted from oldest to newest. For example: - -```js -const banner1 = banners.add({ component: }); -const banner2 = banners.add({ component: , priority: 0 }); -const banner3 = banners.add({ component: , priority: 1 }); -``` - -That would be displayed as: - -``` -[ fake3 ] -[ fake1 ] -[ fake2 ] -``` - -##### Returns - -| Type | Description | -|------|-------------| -| String | A newly generated ID. | - -#### Example - -This example includes buttons that allow the user to remove the banner. In some cases, you may not want any buttons -and in other cases you will want an action to proceed the banner's removal (e.g., apply an Advanced Setting). - -This makes the most sense to use when a banner is added at the beginning of the page life cycle and not expected to -be touched, except by its own buttons triggering an action or navigating away. - -```js -const bannerId = banners.add({ - component: ( - - - - banners.remove(bannerId)} - > - Dismiss - - - - window.alert('Do Something Else')} - > - Do Something Else - - - - - ), -}); -``` - -### `remove()` - -Unlike toast notifications, banners stick around until they are explicitly removed. Using the `add` example above,you can remove it by calling `remove`. - -Note: They will stick around as long as the scope is remembered by whatever set it; navigating away won't remove it -unless the scope is forgotten (e.g., when the "app" changes)! - -#### Syntax - -```js -const removed = banners.remove(bannerId); -``` - -##### Parameters - -| Field | Type | Description | -|-------|------|-------------| -| `id` | String | ID of a banner. | - -##### Returns - -| Type | Description | -|------|-------------| -| Boolean | `true` if the ID was recognized and the banner was removed. `false` otherwise. | - -#### Example - -To remove a banner, you need to pass the `id` of the banner. 
- -```js -if (banners.remove(bannerId)) { - // removed; otherwise it didn't exist (maybe it was already removed) -} -``` - -#### Scheduled removal - -Like toast notifications do automatically, you can have a banner automatically removed after a set of time, by -setting a timer: - -```js -setTimeout(() => banners.remove(bannerId), 15000); -``` - -Note: It is safe to remove a banner more than once as unknown IDs will be ignored. - -### `set()` - -Banners can be replaced once added by supplying their `id`. If one is supplied, then the ID will be used to replace -any banner with the same ID and a **new** `id` will be returned. - -You should only consider using `set` when the banner is manipulated frequently in the lifecycle of the page, where -maintaining the banner's `id` can be a burden. It is easier to allow `banners` to create the ID for you in most -situations where a banner is useful (e.g., set once), which safely avoids any chance to have an ID-based collision, -which happens automatically with `add`. - -Usage of `set` can imply that your use case is abusing the banner system. - -Note: `set` will only trigger the callback once for both the implicit remove and add operation. - -#### Syntax - -```js -const id = banners.set({ - // required: - component, - // optional: - id, - priority, -}); -``` - -##### Parameters - -| Field | Type | Description | -|-------|------|-------------| -| `component` | Any | The value displayed as the banner. | -| `id` | String | Optional ID used to remove an existing banner. | -| `priority` | Number | Optional priority, which defaults to `0` used to place the banner. | - -The `id` is optional because it follows the same semantics as the `remove` method: unknown IDs are ignored. This -is useful when first creating a banner so that you do not have to call `add` instead. - -##### Returns - -| Type | Description | -|------|-------------| -| String | A newly generated ID. | - -#### Example - -This example does not include any way for the user to clear the banner directly. Instead, it is cleared based on -time. Related to it being cleared by time, it can also reappear within the same page life cycle by navigating between -different paths that need it displayed. Instead of adding a new banner for every navigation, you should replace any -existing banner. - -```js -let bannerId; -let timeoutId; - -function displayBanner() { - clearTimeout(timeoutId); - - bannerId = banners.set({ - id: bannerId, // the first time it will be undefined, but reused as long as this is in the same lifecycle - component: ( - - ) - }); - - // hide the message after the user has had a chance to acknowledge it -- so it doesn't permanently stick around - banner.timeoutId = setTimeout(() => { - banners.remove(bannerId); - timeoutId = undefined; - }, 6000); -} -``` - -### `onChange()` - -For React components that intend to display the banners, it is not enough to simply `render` the `banners.list` -values. Because they can change after being rendered, the React component that renders the list must be alerted -to changes to the list. - -#### Syntax - -```js -// inside your React component -banners.onChange(() => this.forceUpdate()); -``` - -##### Parameters - -| Field | Type | Description | -|-------|------|-------------| -| `callback` | Function | The function to invoke whenever the internal banner list is changed. | - -Every new `callback` replaces the previous callback. So calling this with `null` or `undefined` will unset the -callback. - -##### Returns - -Nothing. 
- -#### Example - -This can be used inside of a React component to trigger a re-`render` of the banners. - -```js -import { GlobalBannerList } from 'ui/notify'; - - -``` - -### `list` - -For React components that intend to display the banners, it is not enough to simply `render` the `banners.list` -values. Because they can change after being rendered, the React component that renders the list must be alerted -to changes to the list. - -#### Syntax - -```js - -``` - -##### Returns - -| Type | Description | -|------|-------------| -| Array | The array of banner objects. | - -Banner objects are sorted in descending order based on their `priority`, in the form: - -```js -{ - id: 'banner-123', - component: , - priority: 12, -} -``` - -| Field | Type | Description | -|-------|------|-------------| -| `component` | Any | The value displayed as the banner. | -| `id` | String | The ID of the banner, which can be used as a React "key". | -| `priority` | Number | The priority of the banner. | - -#### Example - -This can be used to supply the banners to the `GlobalBannerList` React component (which is done for you). - -```js -import { GlobalBannerList } from 'ui/notify'; - - -``` - -## Use in functional tests - -Functional tests are commonly used to verify that an action yielded a successful outcome. You can place a -`data-test-subj` attribute on the banner and use it to check if the banner exists inside of your functional test. -This acts as a proxy for verifying the successful outcome. Any unrecognized field will be added as a property of the -containing element. - -```js -banners.add({ - component: ( - - ), - data-test-subj: 'my-tested-banner', -}); -``` - -This will apply the `data-test-subj` to the element containing the `component`, so the inner HTML of that element -will exclusively be the specified `component`. - -Given that `component` is expected to be a React component, you could also add the `data-test-subj` directly to it: - -```js -banners.add({ - component: ( - - ), -}); -``` \ No newline at end of file diff --git a/src/legacy/ui/public/notify/banners/banners.tsx b/src/legacy/ui/public/notify/banners/banners.tsx deleted file mode 100644 index 777e408b8dfc4..0000000000000 --- a/src/legacy/ui/public/notify/banners/banners.tsx +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import React from 'react'; -import ReactDOM from 'react-dom'; -import { npStart } from 'ui/new_platform'; -import { I18nProvider } from '@kbn/i18n/react'; - -const npBanners = npStart.core.overlays.banners; - -/** compatibility layer for new platform */ -const mountForComponent = (component: React.ReactElement) => (element: HTMLElement) => { - ReactDOM.render({component}, element); - return () => ReactDOM.unmountComponentAtNode(element); -}; - -/** - * Banners represents a prioritized list of displayed components. - */ -export class Banners { - /** - * Add a new banner. - * - * @param {Object} component The React component to display. - * @param {Number} priority The optional priority order to display this banner. Higher priority values are shown first. - * @return {String} A newly generated ID. This value can be used to remove/replace the banner. - */ - add = ({ component, priority }: { component: React.ReactElement; priority?: number }) => { - return npBanners.add(mountForComponent(component), priority); - }; - - /** - * Remove an existing banner. - * - * @param {String} id The ID of the banner to remove. - * @return {Boolean} {@code true} if the ID is recognized and the banner is removed. {@code false} otherwise. - */ - remove = (id: string): boolean => { - return npBanners.remove(id); - }; - - /** - * Replace an existing banner by removing it, if it exists, and adding a new one in its place. - * - * This is similar to calling banners.remove, followed by banners.add, except that it only notifies the listener - * after adding. - * - * @param {Object} component The React component to display. - * @param {String} id The ID of the Banner to remove. - * @param {Number} priority The optional priority order to display this banner. Higher priority values are shown first. - * @return {String} A newly generated ID. This value can be used to remove/replace the banner. - */ - set = ({ - component, - id, - priority = 0, - }: { - component: React.ReactElement; - id: string; - priority?: number; - }): string => { - return npBanners.replace(id, mountForComponent(component), priority); - }; -} - -/** - * A singleton instance meant to represent all Kibana banners. - */ -export const banners = new Banners(); diff --git a/src/legacy/ui/public/notify/banners/index.ts b/src/legacy/ui/public/notify/banners/index.ts deleted file mode 100644 index 9221f95074cd9..0000000000000 --- a/src/legacy/ui/public/notify/banners/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -export { banners } from './banners'; diff --git a/src/legacy/ui/public/notify/fatal_error.ts b/src/legacy/ui/public/notify/fatal_error.ts deleted file mode 100644 index 5614ffea7913e..0000000000000 --- a/src/legacy/ui/public/notify/fatal_error.ts +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { npSetup } from 'ui/new_platform'; -import { AngularHttpError, addFatalError } from '../../../../plugins/kibana_legacy/public'; - -export function fatalError(error: AngularHttpError | Error | string, location?: string) { - addFatalError(npSetup.core.fatalErrors, error, location); -} diff --git a/src/legacy/ui/public/notify/index.d.ts b/src/legacy/ui/public/notify/index.d.ts deleted file mode 100644 index 3c1a7ba02db72..0000000000000 --- a/src/legacy/ui/public/notify/index.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { toastNotifications } from './toasts'; -export { fatalError } from './fatal_error'; diff --git a/src/legacy/ui/public/notify/index.js b/src/legacy/ui/public/notify/index.js deleted file mode 100644 index 51394033e4d2e..0000000000000 --- a/src/legacy/ui/public/notify/index.js +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { fatalError } from './fatal_error'; -export { toastNotifications } from './toasts'; -export { banners } from './banners'; diff --git a/src/legacy/ui/public/notify/toasts/index.ts b/src/legacy/ui/public/notify/toasts/index.ts deleted file mode 100644 index c5e8e3b7ca7a3..0000000000000 --- a/src/legacy/ui/public/notify/toasts/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -export { ToastNotifications } from './toast_notifications'; -export { toastNotifications } from './toasts'; diff --git a/src/legacy/ui/public/notify/toasts/toast_notifications.ts b/src/legacy/ui/public/notify/toasts/toast_notifications.ts deleted file mode 100644 index d3ec8edb5d73a..0000000000000 --- a/src/legacy/ui/public/notify/toasts/toast_notifications.ts +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -/** - * ToastNotifications is deprecated! Please use npSetup.core.notifications.toasts instead - */ -export { ToastNotifications } from '../../../../../plugins/kibana_legacy/public'; diff --git a/src/legacy/ui/public/notify/toasts/toasts.ts b/src/legacy/ui/public/notify/toasts/toasts.ts deleted file mode 100644 index 15be7a7891553..0000000000000 --- a/src/legacy/ui/public/notify/toasts/toasts.ts +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { npSetup } from 'ui/new_platform'; -import { ToastNotifications } from './toast_notifications'; -export const toastNotifications = new ToastNotifications(npSetup.core.notifications.toasts); diff --git a/src/legacy/ui/public/private/__tests__/private.js b/src/legacy/ui/public/private/__tests__/private.js deleted file mode 100644 index 1f9d696bb440f..0000000000000 --- a/src/legacy/ui/public/private/__tests__/private.js +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; - -describe('Private module loader', function () { - let Private; - - beforeEach(ngMock.module('kibana')); - beforeEach( - ngMock.inject(function ($injector) { - Private = $injector.get('Private'); - }) - ); - - it('accepts a provider that will be called to init a module', function () { - const football = {}; - function Provider() { - return football; - } - - const instance = Private(Provider); - expect(instance).to.be(football); - }); - - it('injects angular dependencies into the Provider', function () { - function Provider(Private) { - return Private; - } - - const instance = Private(Provider); - expect(instance).to.be(Private); - }); - - it('detects circular dependencies', function () { - expect(function () { - function Provider1() { - Private(Provider2); - } - - function Provider2() { - Private(Provider1); - } - - Private(Provider1); - }).to.throwException(/circular/i); - }); - - it('always provides the same instance form the Provider', function () { - function Provider() { - return {}; - } - - expect(Private(Provider)).to.be(Private(Provider)); - }); - - describe('#stub', function () { - it('accepts a replacement instance for a Provider', function () { - const replaced = {}; - const replacement = {}; - - function Provider() { - return replaced; - } - - const instance = Private(Provider); - expect(instance).to.be(replaced); - - Private.stub(Provider, replacement); - - const instance2 = Private(Provider); - expect(instance2).to.be(replacement); - - Private.stub(Provider, replaced); - - const instance3 = Private(Provider); - expect(instance3).to.be(replaced); - }); - }); - - describe('#swap', function () { - it('accepts a new Provider that should replace an existing Provider', function () { - function Provider1() { - return {}; - } - - 
function Provider2() { - return {}; - } - - const instance1 = Private(Provider1); - expect(instance1).to.be.an('object'); - - Private.swap(Provider1, Provider2); - - const instance2 = Private(Provider1); - expect(instance2).to.be.an('object'); - expect(instance2).to.not.be(instance1); - - Private.swap(Provider1, Provider1); - - const instance3 = Private(Provider1); - expect(instance3).to.be(instance1); - }); - - it('gives the new Provider access to the Provider it replaced via an injectable dependency called $decorate', function () { - function Provider1() { - return {}; - } - - function Provider2($decorate) { - return { - instance1: $decorate(), - }; - } - - const instance1 = Private(Provider1); - expect(instance1).to.be.an('object'); - - Private.swap(Provider1, Provider2); - - const instance2 = Private(Provider1); - expect(instance2).to.have.property('instance1'); - expect(instance2.instance1).to.be(instance1); - }); - }); -}); diff --git a/src/legacy/ui/public/private/index.d.ts b/src/legacy/ui/public/private/index.d.ts deleted file mode 100644 index 3b692ba58cbe1..0000000000000 --- a/src/legacy/ui/public/private/index.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { IPrivate } from '../../../../plugins/kibana_legacy/public/'; diff --git a/src/legacy/ui/public/private/index.js b/src/legacy/ui/public/private/index.js deleted file mode 100644 index 3815f230df466..0000000000000 --- a/src/legacy/ui/public/private/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import './private'; diff --git a/src/legacy/ui/public/private/private.js b/src/legacy/ui/public/private/private.js deleted file mode 100644 index 7a0751959417e..0000000000000 --- a/src/legacy/ui/public/private/private.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { uiModules } from '../modules'; -import { PrivateProvider } from '../../../../plugins/kibana_legacy/public'; - -uiModules.get('kibana/private').provider('Private', PrivateProvider); diff --git a/src/legacy/ui/public/promises/__tests__/promises.js b/src/legacy/ui/public/promises/__tests__/promises.js deleted file mode 100644 index 7041aa2993376..0000000000000 --- a/src/legacy/ui/public/promises/__tests__/promises.js +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import sinon from 'sinon'; - -describe('Promise service', () => { - let Promise; - let $rootScope; - - const sandbox = sinon.createSandbox(); - function tick(ms = 0) { - sandbox.clock.tick(ms); - - // Ugly, but necessary for promises to resolve: https://github.com/angular/angular.js/issues/12555 - $rootScope.$apply(); - } - - beforeEach(ngMock.module('kibana')); - beforeEach( - ngMock.inject(($injector) => { - sandbox.useFakeTimers(); - - Promise = $injector.get('Promise'); - $rootScope = $injector.get('$rootScope'); - }) - ); - - afterEach(() => sandbox.restore()); - - describe('Constructor', () => { - it('provides resolve and reject function', () => { - const executor = sinon.stub(); - new Promise(executor); - - sinon.assert.calledOnce(executor); - sinon.assert.calledWithExactly(executor, sinon.match.func, sinon.match.func); - }); - }); - - it('Promise.resolve', () => { - const onResolve = sinon.stub(); - Promise.resolve(true).then(onResolve); - - tick(); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledWithExactly(onResolve, true); - }); - - describe('Promise.fromNode', () => { - it('creates a callback that controls a promise', () => { - const callback = sinon.stub(); - Promise.fromNode(callback); - - tick(); - - sinon.assert.calledOnce(callback); - sinon.assert.calledWithExactly(callback, sinon.match.func); - }); - - it('rejects if the callback receives an error', () => { - const err = new Error(); - const onReject = sinon.stub(); - Promise.fromNode(sinon.stub().yields(err)).catch(onReject); - - tick(); - - sinon.assert.calledOnce(onReject); - sinon.assert.calledWithExactly(onReject, sinon.match.same(err)); - }); - - it('resolves with the second argument', () => { - const result = {}; - const onResolve = sinon.stub(); - Promise.fromNode(sinon.stub().yields(null, result)).then(onResolve); - - tick(); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledWithExactly(onResolve, sinon.match.same(result)); - }); - - it('resolves with an array if multiple arguments are received', () => { - const result1 = {}; - const result2 = {}; - const onResolve = sinon.stub(); - Promise.fromNode(sinon.stub().yields(null, result1, result2)).then(onResolve); - - tick(); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledWithExactly(onResolve, [ - sinon.match.same(result1), - sinon.match.same(result2), - ]); - }); - - it('resolves with an array if multiple undefined are received', () => { - const onResolve = sinon.stub(); - Promise.fromNode(sinon.stub().yields(null, undefined, undefined)).then(onResolve); - - tick(); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledWithExactly(onResolve, [undefined, undefined]); - }); - }); - - describe('Promise.race()', () => { - it(`resolves with the first resolved promise's value`, () => { - const p1 = new Promise((resolve) => setTimeout(resolve, 100, 1)); - const p2 = new Promise((resolve) => setTimeout(resolve, 200, 2)); - const onResolve = sinon.stub(); - Promise.race([p1, p2]).then(onResolve); - - tick(200); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledWithExactly(onResolve, 1); - }); - - it(`rejects with the first rejected promise's rejection reason`, () => { - const p1Error = new Error('1'); - const p1 = new Promise((r, reject) => setTimeout(reject, 200, p1Error)); - - const p2Error = new Error('2'); - const p2 = new Promise((r, reject) => setTimeout(reject, 100, p2Error)); - - const onReject = sinon.stub(); - Promise.race([p1, 
p2]).catch(onReject); - - tick(200); - - sinon.assert.calledOnce(onReject); - sinon.assert.calledWithExactly(onReject, sinon.match.same(p2Error)); - }); - - it('does not wait for subsequent promises to resolve/reject', () => { - const onP1Resolve = sinon.stub(); - const p1 = new Promise((resolve) => setTimeout(resolve, 100)).then(onP1Resolve); - - const onP2Resolve = sinon.stub(); - const p2 = new Promise((resolve) => setTimeout(resolve, 101)).then(onP2Resolve); - - const onResolve = sinon.stub(); - Promise.race([p1, p2]).then(onResolve); - - tick(100); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledOnce(onP1Resolve); - sinon.assert.callOrder(onP1Resolve, onResolve); - sinon.assert.notCalled(onP2Resolve); - }); - - it('allows non-promises in the array', () => { - const onResolve = sinon.stub(); - Promise.race([1, 2, 3]).then(onResolve); - - tick(); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledWithExactly(onResolve, 1); - }); - - describe('argument is undefined', () => { - it('rejects the promise', () => { - const football = {}; - const onReject = sinon.stub(); - Promise.race() - .catch(() => football) - .then(onReject); - - tick(); - - sinon.assert.calledOnce(onReject); - sinon.assert.calledWithExactly(onReject, sinon.match.same(football)); - }); - }); - - describe('argument is a string', () => { - it(`resolves with the first character`, () => { - const onResolve = sinon.stub(); - Promise.race('abc').then(onResolve); - - tick(); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledWithExactly(onResolve, 'a'); - }); - }); - - describe('argument is a non-iterable object', () => { - it('reject the promise', () => { - const football = {}; - const onReject = sinon.stub(); - Promise.race({}) - .catch(() => football) - .then(onReject); - - tick(); - - sinon.assert.calledOnce(onReject); - sinon.assert.calledWithExactly(onReject, sinon.match.same(football)); - }); - }); - - describe('argument is a generator', () => { - it('resolves with the first resolved value', () => { - function* gen() { - yield new Promise((resolve) => setTimeout(resolve, 100, 1)); - yield new Promise((resolve) => setTimeout(resolve, 200, 2)); - } - - const onResolve = sinon.stub(); - Promise.race(gen()).then(onResolve); - - tick(200); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledWithExactly(onResolve, 1); - }); - - it('resolves with the first non-promise value', () => { - function* gen() { - yield 1; - yield new Promise((resolve) => setTimeout(resolve, 200, 2)); - } - - const onResolve = sinon.stub(); - Promise.race(gen()).then(onResolve); - - tick(200); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledWithExactly(onResolve, 1); - }); - - it('iterates all values from the generator, even if one is already "resolved"', () => { - let yieldCount = 0; - function* gen() { - yieldCount += 1; - yield 1; - yieldCount += 1; - yield new Promise((resolve) => setTimeout(resolve, 200, 2)); - } - - const onResolve = sinon.stub(); - Promise.race(gen()).then(onResolve); - - tick(200); - - sinon.assert.calledOnce(onResolve); - sinon.assert.calledWithExactly(onResolve, 1); - expect(yieldCount).to.be(2); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/promises/defer.ts b/src/legacy/ui/public/promises/defer.ts deleted file mode 100644 index 3d435f2ba8dfd..0000000000000 --- a/src/legacy/ui/public/promises/defer.ts +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export interface Defer { - promise: Promise; - resolve(value: T): void; - reject(reason: Error): void; -} - -export function createDefer(Class: typeof Promise): Defer { - const defer: Partial> = {}; - defer.promise = new Class((resolve, reject) => { - defer.resolve = resolve; - defer.reject = reject; - }); - return defer as Defer; -} diff --git a/src/legacy/ui/public/promises/index.ts b/src/legacy/ui/public/promises/index.ts deleted file mode 100644 index 07bb2554c5eda..0000000000000 --- a/src/legacy/ui/public/promises/index.ts +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { PromiseServiceCreator } from '../../../../plugins/kibana_legacy/public'; -export { createDefer } from './defer'; -// @ts-ignore -import { uiModules } from '../modules'; - -const module = uiModules.get('kibana'); -// Provides a tiny subset of the excellent API from -// bluebird, reimplemented using the $q service -module.service('Promise', PromiseServiceCreator); diff --git a/src/legacy/ui/public/react_components.js b/src/legacy/ui/public/react_components.js deleted file mode 100644 index 21fb53d6407fa..0000000000000 --- a/src/legacy/ui/public/react_components.js +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
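The `defer.ts` module removed above exposes `createDefer`, a small helper that surfaces a promise together with its `resolve` and `reject` handles, but no usage appears anywhere in the deleted code. As an editorial sketch (not part of the diff) of how it was meant to be called, assuming the legacy `ui/promises` import alias and using the native `Promise` constructor in place of the `$q`-backed `Promise` service it was usually handed:

```js
import { createDefer } from 'ui/promises';

// createDefer takes a Promise-like constructor and returns { promise, resolve, reject }
const defer = createDefer(Promise);

// hand the resolve handle to a callback-style API
setTimeout(() => defer.resolve('done'), 100);

defer.promise.then((value) => {
  console.log(value); // logs 'done'
});
```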
- */ - -import 'ngreact'; - -import { EuiIcon, EuiIconTip } from '@elastic/eui'; - -import { uiModules } from './modules'; - -const app = uiModules.get('app/kibana', ['react']); - -app.directive('icon', (reactDirective) => reactDirective(EuiIcon)); - -app.directive('iconTip', (reactDirective) => - reactDirective(EuiIconTip, ['content', 'type', 'position', 'title', 'color']) -); diff --git a/src/legacy/ui/public/registry/__tests__/registry.js b/src/legacy/ui/public/registry/__tests__/registry.js deleted file mode 100644 index 10b2423abc0e3..0000000000000 --- a/src/legacy/ui/public/registry/__tests__/registry.js +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { uiRegistry } from '../_registry'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; - -describe('Registry', function () { - let Private; - - beforeEach(ngMock.module('kibana')); - beforeEach( - ngMock.inject(function ($injector) { - Private = $injector.get('Private'); - }) - ); - - it('is technically a function', function () { - const reg = uiRegistry(); - expect(reg).to.be.a('function'); - }); - - describe('#register', function () { - it('accepts a Private module', function () { - const reg = uiRegistry(); - const mod = function SomePrivateModule() {}; - - reg.register(mod); - // modules are not exposed, so this is the most that we can test - }); - - it('applies the filter function if one is specified', function () { - const reg = uiRegistry({ - filter: (item) => item.value % 2 === 0, // register only even numbers - }); - - reg.register(() => ({ value: 17 })); - reg.register(() => ({ value: 18 })); // only this one should get registered - reg.register(() => ({ value: 19 })); - - const modules = Private(reg); - expect(modules).to.have.length(1); - expect(modules[0].value).to.be(18); - }); - }); - - describe('as a module', function () { - it('exposes the list of registered modules', function () { - const reg = uiRegistry(); - const mod = function SomePrivateModule(Private) { - this.PrivateModuleLoader = Private; - }; - - reg.register(mod); - const modules = Private(reg); - expect(modules).to.have.length(1); - expect(modules[0]).to.have.property('PrivateModuleLoader', Private); - }); - }); - - describe('spec', function () { - it('executes with the module list as "this", and can override it', function () { - let self; - - const reg = uiRegistry({ - constructor: function () { - return { mods: (self = this) }; - }, - }); - - const modules = Private(reg); - expect(modules).to.be.an('object'); - expect(modules).to.have.property('mods', self); - }); - }); - - describe('spec.name', function () { - it('sets the displayName of the registry and the name param on the final instance', function () { - const reg = uiRegistry({ - name: 
'visTypes', - }); - - expect(reg).to.have.property('displayName', '[registry visTypes]'); - expect(Private(reg)).to.have.property('name', 'visTypes'); - }); - }); - - describe('spec.constructor', function () { - it('executes before the modules are returned', function () { - let i = 0; - - const reg = uiRegistry({ - constructor: function () { - i = i + 1; - }, - }); - - Private(reg); - expect(i).to.be(1); - }); - - it('executes with the module list as "this", and can override it', function () { - let self; - - const reg = uiRegistry({ - constructor: function () { - return { mods: (self = this) }; - }, - }); - - const modules = Private(reg); - expect(modules).to.be.an('object'); - expect(modules).to.have.property('mods', self); - }); - }); - - describe('spec.invokeProviders', () => { - it('is called with the registered providers and defines the initial set of values in the registry', () => { - const reg = uiRegistry({ - invokeProviders(providers) { - return providers.map((i) => i * 1000); - }, - }); - - reg.register(1); - reg.register(2); - reg.register(3); - expect(Private(reg).toJSON()).to.eql([1000, 2000, 3000]); - }); - it('does not get assigned as a property of the registry', () => { - expect( - uiRegistry({ - invokeProviders() {}, - }) - ).to.not.have.property('invokeProviders'); - }); - }); - - describe('spec[any]', function () { - it('mixes the extra properties into the module list', function () { - const reg = uiRegistry({ - someMethod: function () { - return this; - }, - }); - - const modules = Private(reg); - expect(modules).to.have.property('someMethod'); - expect(modules.someMethod()).to.be(modules); - }); - }); -}); diff --git a/src/legacy/ui/public/registry/_registry.d.ts b/src/legacy/ui/public/registry/_registry.d.ts deleted file mode 100644 index 42f1bb521763c..0000000000000 --- a/src/legacy/ui/public/registry/_registry.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { IndexedArray, IndexedArrayConfig } from '../indexed_array'; - -// eslint-disable-next-line @typescript-eslint/no-empty-interface -interface UIRegistry extends IndexedArray {} - -interface UIRegistrySpec extends IndexedArrayConfig { - name: string; - filter?(item: T): boolean; -} - -/** - * Creates a new UiRegistry (See js method for detailed documentation) - * The generic type T is the type of objects which are stored in the registry. - * The generic type A is an interface of accessors which depend on the - * fields of the objects stored in the registry. 
- * Example: if there is a string field "name" in type T, then A should be - * `{ byName: { [typeName: string]: T }; }` - */ -declare function uiRegistry( - spec: UIRegistrySpec -): { (): UIRegistry & A; register(privateModule: T): UIRegistry & A }; diff --git a/src/legacy/ui/public/registry/_registry.js b/src/legacy/ui/public/registry/_registry.js deleted file mode 100644 index 85aa1d9f2eca8..0000000000000 --- a/src/legacy/ui/public/registry/_registry.js +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; -import { IndexedArray } from '../indexed_array'; - -const notPropsOptNames = IndexedArray.OPT_NAMES.concat('constructor', 'invokeProviders'); - -/** - * Create a registry, which is just a Private module provider. - * - * The registry allows modifying the values it will provide - * using the #register method. - * - * To access these modules, pass the registry to the Private - * module loader. - * - * # Examples - * - * + register a module - * ```js - * let registry = require('ui/registry/vis_types'); - * registry.add(function InjectablePrivateModule($http, Promise) { - * ... - * }) - * ``` - * - * + get all registered modules - * ```js - * let visTypes = Private(RegistryVisTypesProvider); - * ``` - * - * - * @param {object} [spec] - an object describing the properties of - * the registry to create. Any property specified - * that is not listed below will be mixed into the - * final IndexedArray object. - * - * # init - * @param {Function} [spec.constructor] - an injectable function that is called when - * the registry is first instantiated by the app. - * @param {boolean} [spec.filter] - function that will be used to filter items before - * registering them. 
Function will called on each item and - * should return true to keep the item (register it) or - * skip it (don't register it) - * - * # IndexedArray params - * @param {array[String]} [spec.index] - passed to the IndexedArray constructor - * @param {array[String]} [spec.group] - passed to the IndexedArray constructor - * @param {array[String]} [spec.order] - passed to the IndexedArray constructor - * @param {array[String]} [spec.initialSet] - passed to the IndexedArray constructor - * @param {array[String]} [spec.immutable] - passed to the IndexedArray constructor - * - * @return {[type]} [description] - */ -export function uiRegistry(spec) { - spec = spec || {}; - - const constructor = _.has(spec, 'constructor') && spec.constructor; - const filter = _.has(spec, 'filter') && spec.filter; - const invokeProviders = _.has(spec, 'invokeProviders') && spec.invokeProviders; - const iaOpts = _.defaults(_.pick(spec, IndexedArray.OPT_NAMES), { index: ['name'] }); - const props = _.omit(spec, notPropsOptNames); - const providers = []; - - let isInstantiated = false; - let getInvokedProviders; - let modules; - - /** - * This is the Private module that will be instantiated by - * - * @tag:PrivateModule - * @return {IndexedArray} - an indexed array containing the values - * that were registered, the registry spec - * defines how things will be indexed. - */ - const registry = function (Private, $injector) { - getInvokedProviders = function (newProviders) { - let set = invokeProviders - ? $injector.invoke(invokeProviders, undefined, { providers: newProviders }) - : newProviders.map(Private); - - if (filter && _.isFunction(filter)) { - set = set.filter((item) => filter(item)); - } - - return set; - }; - - iaOpts.initialSet = getInvokedProviders(providers); - - modules = new IndexedArray(iaOpts); - - // mixin other props - _.assign(modules, props); - - // construct - if (constructor) { - modules = $injector.invoke(constructor, modules) || modules; - } - - isInstantiated = true; - - return modules; - }; - - registry.displayName = '[registry ' + props.name + ']'; - - registry.register = function (privateModule) { - providers.push(privateModule); - - if (isInstantiated) { - const [provider] = getInvokedProviders([privateModule]); - - if (provider) { - modules.push(provider); - } - } - - return registry; - }; - - return registry; -} diff --git a/src/legacy/ui/public/registry/chrome_header_nav_controls.ts b/src/legacy/ui/public/registry/chrome_header_nav_controls.ts deleted file mode 100644 index 7e6c80692cd63..0000000000000 --- a/src/legacy/ui/public/registry/chrome_header_nav_controls.ts +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
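As a rough illustration of the register-then-instantiate behavior that `uiRegistry` implements above (minus Angular's `Private`/`$injector` and `IndexedArray`), here is a self-contained sketch; `createSimpleRegistry` and its shape are invented for this example:

```js
// Providers registered before first use are invoked lazily; providers
// registered afterwards are invoked immediately and appended. An optional
// filter decides which invoked items are kept, as in the registry tests above.
function createSimpleRegistry({ filter } = {}) {
  const providers = [];
  let instantiated = false;
  let modules = null;

  const invoke = (list) => {
    let items = list.map((provider) => provider());
    if (filter) items = items.filter(filter);
    return items;
  };

  return {
    register(provider) {
      providers.push(provider);
      if (instantiated) modules.push(...invoke([provider]));
      return this;
    },
    get() {
      if (!instantiated) {
        modules = invoke(providers);
        instantiated = true;
      }
      return modules;
    },
  };
}

// Mirrors the "applies the filter function" test above: only even values survive.
const reg = createSimpleRegistry({ filter: (item) => item.value % 2 === 0 });
reg.register(() => ({ value: 17 }));
reg.register(() => ({ value: 18 }));
reg.register(() => ({ value: 19 }));
console.log(reg.get()); // [ { value: 18 } ]
```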
- */ - -import { IndexedArray } from '../indexed_array'; -import { uiRegistry, UIRegistry } from './_registry'; - -interface ChromeHeaderNavControlsRegistryAccessors { - bySide: { [typeName: string]: IndexedArray }; -} - -export enum NavControlSide { - Left = 'left', - Right = 'right', -} - -export interface NavControl { - name: string; - order: number; - side: NavControlSide; - render: (targetDomElement: HTMLDivElement) => () => void; -} - -export type ChromeHeaderNavControlsRegistry = UIRegistry & - ChromeHeaderNavControlsRegistryAccessors; - -export const chromeHeaderNavControlsRegistry = uiRegistry< - NavControl, - ChromeHeaderNavControlsRegistryAccessors ->({ - name: 'chromeHeaderNavControls', - order: ['order'], - group: ['side'], -}); diff --git a/src/legacy/ui/public/registry/field_format_editors.ts b/src/legacy/ui/public/registry/field_format_editors.ts deleted file mode 100644 index 5489ce9250e04..0000000000000 --- a/src/legacy/ui/public/registry/field_format_editors.ts +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { uiRegistry } from './_registry'; - -export const RegistryFieldFormatEditorsProvider = uiRegistry({ - name: 'fieldFormatEditors', - index: ['formatId'], -}); diff --git a/src/legacy/ui/public/routes/__tests__/_route_manager.js b/src/legacy/ui/public/routes/__tests__/_route_manager.js deleted file mode 100644 index eb47a3e9ace70..0000000000000 --- a/src/legacy/ui/public/routes/__tests__/_route_manager.js +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import _ from 'lodash'; -import ngMock from 'ng_mock'; -import sinon from 'sinon'; -import RouteManager from '../route_manager'; -import expect from '@kbn/expect'; - -let routes; // will contain an new instance of RouteManager for each test -const chainableMethods = [ - { name: 'when', args: ['', {}] }, - { name: 'otherwise', args: [{}] }, - { name: 'defaults', args: [/regexp/, {}] }, -]; - -let $rp; -describe('routes/route_manager', function () { - beforeEach( - ngMock.module('kibana', function ($routeProvider) { - $rp = $routeProvider; - sinon.stub($rp, 'otherwise'); - sinon.stub($rp, 'when'); - }) - ); - - beforeEach( - ngMock.inject(function () { - routes = new RouteManager(); - }) - ); - - it('should have chainable methods: ' + _.map(chainableMethods, 'name').join(', '), function () { - chainableMethods.forEach(function (meth) { - expect(routes[meth.name].apply(routes, _.clone(meth.args))).to.be(routes); - }); - }); - - describe('#otherwise', function () { - it('should forward the last otherwise route', function () { - const otherRoute = {}; - routes.otherwise({}); - routes.otherwise(otherRoute); - - routes.config($rp); - - expect($rp.otherwise.callCount).to.be(1); - expect($rp.otherwise.getCall(0).args[0]).to.be(otherRoute); - }); - }); - - describe('#when', function () { - it('should merge the additions into the when() defined routes', function () { - routes.when('/some/route'); - routes.when('/some/other/route'); - - // add the addition resolve to every route - routes.defaults(/.*/, { - resolve: { - addition: function () {}, - }, - }); - - routes.config($rp); - - // should have run once for each when route - expect($rp.when.callCount).to.be(2); - expect($rp.otherwise.callCount).to.be(0); - - // every route should have the "addition" resolve - expect($rp.when.getCall(0).args[1].resolve.addition).to.be.a('function'); - expect($rp.when.getCall(1).args[1].resolve.addition).to.be.a('function'); - }); - }); - - describe('#config', function () { - it('should add defined routes to the global $routeProvider service in order', function () { - const args = [ - ['/one', {}], - ['/two', {}], - ]; - - args.forEach(function (a) { - routes.when(a[0], a[1]); - }); - - routes.config($rp); - - expect($rp.when.callCount).to.be(args.length); - _.times(args.length, function (i) { - const call = $rp.when.getCall(i); - const a = args.shift(); - - expect(call.args[0]).to.be(a[0]); - expect(call.args[1]).to.be(a[1]); - }); - }); - - it('sets route.reloadOnSearch to false by default', function () { - routes.when('/nothing-set'); - routes.when('/no-reload', { reloadOnSearch: false }); - routes.when('/always-reload', { reloadOnSearch: true }); - routes.config($rp); - - expect($rp.when.callCount).to.be(3); - expect($rp.when.firstCall.args[1]).to.have.property('reloadOnSearch', false); - expect($rp.when.secondCall.args[1]).to.have.property('reloadOnSearch', false); - expect($rp.when.lastCall.args[1]).to.have.property('reloadOnSearch', true); - }); - }); - - describe('#defaults()', () => { - it('adds defaults to routes with matching paths', () => { - routes.when('/foo', { name: 'foo' }); - routes.when('/bar', { name: 'bar' }); - routes.when('/baz', { name: 'baz' }); - routes.defaults(/^\/ba/, { - withDefaults: true, - }); - routes.config($rp); - - sinon.assert.calledWithExactly( - $rp.when, - '/foo', - sinon.match({ name: 'foo', withDefaults: undefined }) - ); - sinon.assert.calledWithExactly( - $rp.when, - '/bar', - sinon.match({ name: 'bar', withDefaults: true }) - ); - sinon.assert.calledWithExactly( - 
$rp.when, - '/baz', - sinon.match({ name: 'baz', withDefaults: true }) - ); - }); - - it('does not override values specified in the route', () => { - routes.when('/foo', { name: 'foo' }); - routes.defaults(/./, { name: 'bar' }); - routes.config($rp); - - sinon.assert.calledWithExactly($rp.when, '/foo', sinon.match({ name: 'foo' })); - }); - - // See https://github.com/elastic/kibana/issues/13294 - it('does not assign defaults by reference, to prevent accidentally merging unrelated defaults together', () => { - routes.when('/foo', { name: 'foo' }); - routes.when('/bar', { name: 'bar' }); - routes.when('/baz', { name: 'baz', funcs: { bazFunc() {} } }); - - // multiple defaults must be defined that, when applied correctly, will - // create a new object property on all routes that is unique to all of them - routes.defaults(/./, { funcs: { all() {} } }); - routes.defaults(/^\/foo/, { funcs: { fooFunc() {} } }); - routes.defaults(/^\/bar/, { funcs: { barFunc() {} } }); - routes.config($rp); - - sinon.assert.calledThrice($rp.when); - sinon.assert.calledWithExactly( - $rp.when, - '/foo', - sinon.match({ - name: 'foo', - funcs: sinon.match({ - all: sinon.match.func, - fooFunc: sinon.match.func, - barFunc: undefined, - bazFunc: undefined, - }), - }) - ); - sinon.assert.calledWithExactly( - $rp.when, - '/bar', - sinon.match({ - name: 'bar', - funcs: sinon.match({ - all: sinon.match.func, - fooFunc: undefined, - barFunc: sinon.match.func, - bazFunc: undefined, - }), - }) - ); - sinon.assert.calledWithExactly( - $rp.when, - '/baz', - sinon.match({ - name: 'baz', - funcs: sinon.match({ - all: sinon.match.func, - fooFunc: undefined, - barFunc: undefined, - bazFunc: sinon.match.func, - }), - }) - ); - }); - }); -}); diff --git a/src/legacy/ui/public/routes/__tests__/_work_queue.js b/src/legacy/ui/public/routes/__tests__/_work_queue.js deleted file mode 100644 index 72891f7321fbd..0000000000000 --- a/src/legacy/ui/public/routes/__tests__/_work_queue.js +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import _ from 'lodash'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import { WorkQueue } from '../work_queue'; -import sinon from 'sinon'; -import { createDefer } from 'ui/promises'; - -describe('work queue', function () { - let queue; - let Promise; - - beforeEach(ngMock.module('kibana')); - beforeEach( - ngMock.inject(function (_Promise_) { - Promise = _Promise_; - }) - ); - beforeEach(function () { - queue = new WorkQueue(); - }); - afterEach(function () { - queue.empty(); - }); - - describe('#push', function () { - it('adds to the interval queue', function () { - queue.push(createDefer(Promise)); - expect(queue).to.have.length(1); - }); - }); - - describe('#resolveWhenFull', function () { - it('resolves requests waiting for the queue to fill when appropriate', function () { - const size = _.random(5, 50); - queue.limit = size; - - const whenFull = createDefer(Promise); - sinon.stub(whenFull, 'resolve'); - queue.resolveWhenFull(whenFull); - - // push all but one into the queue - _.times(size - 1, function () { - queue.push(createDefer(Promise)); - }); - - expect(whenFull.resolve.callCount).to.be(0); - queue.push(createDefer(Promise)); - expect(whenFull.resolve.callCount).to.be(1); - - queue.empty(); - }); - }); - - /** - * Fills the queue with a random number of work defers, but stubs all defer methods - * with the same stub and passed it back. - * - * @param {function} then - called with then(size, stub) so that the test - * can manipulate the filled queue - */ - function fillWithStubs(then) { - const size = _.random(5, 50); - const stub = sinon.stub(); - - _.times(size, function () { - const d = createDefer(Promise); - // overwrite the defer methods with the stub - d.resolve = stub; - d.reject = stub; - queue.push(d); - }); - - then(size, stub); - } - - describe('#doWork', function () { - it('flushes the queue and resolves all promises', function () { - fillWithStubs(function (size, stub) { - expect(queue).to.have.length(size); - queue.doWork(); - expect(queue).to.have.length(0); - expect(stub.callCount).to.be(size); - }); - }); - }); - - describe('#empty()', function () { - it('empties the internal queue WITHOUT resolving any promises', function () { - fillWithStubs(function (size, stub) { - expect(queue).to.have.length(size); - queue.empty(); - expect(queue).to.have.length(0); - expect(stub.callCount).to.be(0); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/routes/__tests__/_wrap_route_with_prep.js b/src/legacy/ui/public/routes/__tests__/_wrap_route_with_prep.js deleted file mode 100644 index 8ae85fce591a1..0000000000000 --- a/src/legacy/ui/public/routes/__tests__/_wrap_route_with_prep.js +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import RouteManager from '../route_manager'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; - -import _ from 'lodash'; -import '../../private'; - -let routes; - -describe('wrapRouteWithPrep fn', function () { - require('test_utils/no_digest_promises').activateForSuite(); - - beforeEach(function () { - routes = new RouteManager(); - }); - - const SchedulingTest = function (opts) { - opts = opts || {}; - - const delaySetup = opts.delayUserWork ? 0 : 50; - const delayUserWork = opts.delayUserWork ? 50 : 0; - - return function () { - ngMock.module('kibana'); - let setupComplete = false; - let userWorkComplete = false; - let route; - let Promise; - let $injector; - - ngMock.inject(function (_Promise_, _$injector_) { - Promise = _Promise_; - $injector = _$injector_; - }); - - routes.addSetupWork(function () { - return new Promise(function (resolve) { - setTimeout(function () { - setupComplete = true; - resolve(); - }, delaySetup); - }); - }); - - routes - .when('/', { - resolve: { - test: function () { - expect(setupComplete).to.be(true); - userWorkComplete = true; - }, - }, - }) - .config({ - when: function (p, _r) { - route = _r; - }, - }); - - return new Promise(function (resolve, reject) { - setTimeout(function () { - Promise.all( - _.map(route.resolve, function (fn) { - return $injector.invoke(fn); - }) - ) - .then(function () { - expect(setupComplete).to.be(true); - expect(userWorkComplete).to.be(true); - }) - .then(resolve, reject); - }, delayUserWork); - }); - }; - }; - - it('always waits for setup to complete before calling user work', new SchedulingTest()); - - it('does not call user work when setup fails', new SchedulingTest({ failSetup: true })); - - it( - 'calls all user work even if it is not initialized until after setup is complete', - new SchedulingTest({ - delayUserWork: false, - }) - ); -}); diff --git a/src/legacy/ui/public/routes/__tests__/index.js b/src/legacy/ui/public/routes/__tests__/index.js deleted file mode 100644 index 30cedaaca3d19..0000000000000 --- a/src/legacy/ui/public/routes/__tests__/index.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import './_route_manager'; -import './_work_queue'; -import './_wrap_route_with_prep'; -describe('Custom Route Management', function () {}); diff --git a/src/legacy/ui/public/routes/breadcrumbs.js b/src/legacy/ui/public/routes/breadcrumbs.js deleted file mode 100644 index 7917ffbd7c6e6..0000000000000 --- a/src/legacy/ui/public/routes/breadcrumbs.js +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { trim, startCase } from 'lodash'; - -/** - * Take a path (from $location.path() usually) and parse - * it's segments into a list of breadcrumbs - * - * @param {string} path - * @return {Array} - */ -export function parsePathToBreadcrumbs(path) { - return trim(path, '/') - .split('/') - .reduce( - (acc, id, i, parts) => [ - ...acc, - { - id, - display: startCase(id), - href: i === 0 ? `#/${id}` : `${acc[i - 1].href}/${id}`, - current: i === parts.length - 1, - }, - ], - [] - ); -} diff --git a/src/legacy/ui/public/routes/index.d.ts b/src/legacy/ui/public/routes/index.d.ts deleted file mode 100644 index 84e17b0a69452..0000000000000 --- a/src/legacy/ui/public/routes/index.d.ts +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { uiRoutes, UIRoutes } from 'ui/routes/routes'; - -// eslint-disable-next-line import/no-default-export -export default uiRoutes; -export { UIRoutes }; diff --git a/src/legacy/ui/public/routes/index.js b/src/legacy/ui/public/routes/index.js deleted file mode 100644 index 9c826ebee1230..0000000000000 --- a/src/legacy/ui/public/routes/index.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
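To make the output of the removed `parsePathToBreadcrumbs()` concrete, the same logic run stand-alone; the example path is invented:

```js
// Copy of the parsing logic from the removed breadcrumbs.js, shown with output.
const { trim, startCase } = require('lodash');

function parsePathToBreadcrumbs(path) {
  return trim(path, '/')
    .split('/')
    .reduce(
      (acc, id, i, parts) => [
        ...acc,
        {
          id,
          display: startCase(id),
          href: i === 0 ? `#/${id}` : `${acc[i - 1].href}/${id}`,
          current: i === parts.length - 1,
        },
      ],
      []
    );
}

console.log(parsePathToBreadcrumbs('/management/index_patterns'));
// [
//   { id: 'management', display: 'Management', href: '#/management', current: false },
//   { id: 'index_patterns', display: 'Index Patterns',
//     href: '#/management/index_patterns', current: true }
// ]
```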
- */ - -import { uiRoutes } from './routes'; - -// eslint-disable-next-line import/no-default-export -export default uiRoutes; diff --git a/src/legacy/ui/public/routes/route_manager.d.ts b/src/legacy/ui/public/routes/route_manager.d.ts deleted file mode 100644 index a5261a7c8ee3a..0000000000000 --- a/src/legacy/ui/public/routes/route_manager.d.ts +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * WARNING: these types are incomplete - */ - -import { ChromeBreadcrumb } from '../../../../core/public'; - -export interface RouteConfiguration { - controller?: string | ((...args: any[]) => void); - redirectTo?: string; - resolveRedirectTo?: (...args: any[]) => void; - reloadOnSearch?: boolean; - reloadOnUrl?: boolean; - outerAngularWrapperRoute?: boolean; - resolve?: object; - template?: string; - k7Breadcrumbs?: (...args: any[]) => ChromeBreadcrumb[]; - requireUICapability?: string; -} - -interface RouteManager { - addSetupWork(cb: (...args: any[]) => void): void; - when(path: string, routeConfiguration: RouteConfiguration): RouteManager; - otherwise(routeConfiguration: RouteConfiguration): RouteManager; - defaults(path: string | RegExp, defaults: RouteConfiguration): RouteManager; -} - -// eslint-disable-next-line import/no-default-export -export default RouteManager; diff --git a/src/legacy/ui/public/routes/route_manager.js b/src/legacy/ui/public/routes/route_manager.js deleted file mode 100644 index de8a541d1c50a..0000000000000 --- a/src/legacy/ui/public/routes/route_manager.js +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { cloneDeep, defaultsDeep, wrap } from 'lodash'; - -import { wrapRouteWithPrep } from './wrap_route_with_prep'; -import { RouteSetupManager } from './route_setup_manager'; -import { parsePathToBreadcrumbs } from './breadcrumbs'; - -// eslint-disable-next-line import/no-default-export -export default function RouteManager() { - const self = this; - const setup = new RouteSetupManager(); - const when = []; - const defaults = []; - let otherwise; - - self.config = function ($routeProvider) { - when.forEach(function (args) { - const path = args[0]; - const route = args[1] || {}; - - defaults.forEach((def) => { - if (def.regex.test(path)) { - defaultsDeep(route, cloneDeep(def.value)); - } - }); - - if (route.reloadOnSearch == null) { - route.reloadOnSearch = false; - } - - wrapRouteWithPrep(route, setup); - $routeProvider.when(path, route); - }); - - if (otherwise) { - wrapRouteWithPrep(otherwise, setup); - $routeProvider.otherwise(otherwise); - } - }; - - self.run = function ($location, $route, $injector, $rootScope) { - if (window.elasticApm && typeof window.elasticApm.startTransaction === 'function') { - /** - * capture route-change events as transactions which happens after - * the browser's on load event. - * - * In Kibana app, this logic would run after the boostrap js files gets - * downloaded and get associated with the page-load transaction - */ - $rootScope.$on('$routeChangeStart', (_, nextRoute) => { - if (nextRoute.$$route) { - const name = nextRoute.$$route.originalPath; - window.elasticApm.startTransaction(name, 'route-change'); - } - }); - } - - self.getBreadcrumbs = () => { - const breadcrumbs = parsePathToBreadcrumbs($location.path()); - const map = $route.current.mapBreadcrumbs; - return map ? $injector.invoke(map, null, { breadcrumbs }) : breadcrumbs; - }; - }; - - const wrapSetupAndChain = (fn, ...args) => { - fn.apply(setup, args); - return this; - }; - - this.addSetupWork = wrap(setup.addSetupWork, wrapSetupAndChain); - this.afterSetupWork = wrap(setup.afterSetupWork, wrapSetupAndChain); - this.afterWork = wrap(setup.afterWork, wrapSetupAndChain); - - self.when = function (path, route) { - when.push([path, route]); - return self; - }; - - // before attaching the routes to the routeProvider, test the RE - // against the .when() path and add/override the resolves if there is a match - self.defaults = function (regex, value) { - defaults.push({ regex, value }); - return self; - }; - - self.otherwise = function (route) { - otherwise = route; - return self; - }; - - self.getBreadcrumbs = function () { - // overwritten in self.run(); - return []; - }; - - self.RouteManager = RouteManager; -} diff --git a/src/legacy/ui/public/routes/route_setup_manager.js b/src/legacy/ui/public/routes/route_setup_manager.js deleted file mode 100644 index a7a2f078f40fb..0000000000000 --- a/src/legacy/ui/public/routes/route_setup_manager.js +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
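A stand-alone sketch of the per-path defaults merge that `RouteManager#config` performs above (lodash only; `applyDefaults` is a name invented for this example):

```js
const { cloneDeep, defaultsDeep } = require('lodash');

// Apply every matching defaults entry to a route without overriding values
// the route already defines, then default reloadOnSearch to false.
function applyDefaults(path, route, defaults) {
  defaults.forEach((def) => {
    if (def.regex.test(path)) {
      defaultsDeep(route, cloneDeep(def.value));
    }
  });
  if (route.reloadOnSearch == null) {
    route.reloadOnSearch = false;
  }
  return route;
}

const defaults = [{ regex: /^\/ba/, value: { withDefaults: true } }];
console.log(applyDefaults('/bar', { name: 'bar' }, defaults));
// { name: 'bar', withDefaults: true, reloadOnSearch: false }
console.log(applyDefaults('/foo', { name: 'foo' }, defaults));
// { name: 'foo', reloadOnSearch: false }
```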
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; -import { createDefer } from 'ui/promises'; - -// Throw this inside of an Angular route resolver after calling `kbnUrl.change` -// so that the $router can observe the $location update. Otherwise, the location -// route setup work will resolve before the route update occurs. -export const WAIT_FOR_URL_CHANGE_TOKEN = new Error('WAIT_FOR_URL_CHANGE_TOKEN'); - -export class RouteSetupManager { - constructor() { - this.setupWork = []; - this.onSetupComplete = []; - this.onSetupError = []; - this.onWorkComplete = []; - this.onWorkError = []; - } - - addSetupWork(fn) { - this.setupWork.push(fn); - } - - afterSetupWork(onComplete, onError) { - this.onSetupComplete.push(onComplete); - this.onSetupError.push(onError); - } - - afterWork(onComplete, onError) { - this.onWorkComplete.push(onComplete); - this.onWorkError.push(onError); - } - - /** - * Do each setupWork function by injecting it with angular dependencies - * and accepting promises from it. - * @return {[type]} [description] - */ - doWork(Promise, $injector, userWork) { - const invokeEach = (arr, locals) => { - return Promise.map(arr, (fn) => { - if (!fn) return; - return $injector.invoke(fn, null, locals); - }); - }; - - // call each error handler in order, until one of them resolves - // or we run out of handlers - const callErrorHandlers = (handlers, origError) => { - if (!_.size(handlers)) throw origError; - - // clone so we don't discard handlers or loose them - handlers = handlers.slice(0); - - const next = (err) => { - if (!handlers.length) throw err; - - const handler = handlers.shift(); - if (!handler) return next(err); - - return Promise.try(function () { - return $injector.invoke(handler, null, { err }); - }).catch(next); - }; - - return next(origError); - }; - - return invokeEach(this.setupWork) - .then( - () => invokeEach(this.onSetupComplete), - (err) => callErrorHandlers(this.onSetupError, err) - ) - .then(() => { - // wait for the queue to fill up, then do all the work - const defer = createDefer(Promise); - userWork.resolveWhenFull(defer); - - return defer.promise.then(() => Promise.all(userWork.doWork())); - }) - .catch((error) => { - if (error === WAIT_FOR_URL_CHANGE_TOKEN) { - // prevent moving forward, return a promise that never resolves - // so that the $router can observe the $location update - return Promise.halt(); - } - - throw error; - }) - .then( - () => invokeEach(this.onWorkComplete), - (err) => callErrorHandlers(this.onWorkError, err) - ); - } -} diff --git a/src/legacy/ui/public/routes/routes.d.ts b/src/legacy/ui/public/routes/routes.d.ts deleted file mode 100644 index d48230e9d56f9..0000000000000 --- a/src/legacy/ui/public/routes/routes.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import RouteManager from 'ui/routes/route_manager'; - -interface DefaultRouteManager extends RouteManager { - WAIT_FOR_URL_CHANGE_TOKEN: string; - enable(): void; -} - -export const uiRoutes: DefaultRouteManager; -export type UIRoutes = DefaultRouteManager; diff --git a/src/legacy/ui/public/routes/routes.js b/src/legacy/ui/public/routes/routes.js deleted file mode 100644 index 2cfb7a608e853..0000000000000 --- a/src/legacy/ui/public/routes/routes.js +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import RouteManager from './route_manager'; -import 'angular-route/angular-route'; -import { uiModules } from '../modules'; -import { WAIT_FOR_URL_CHANGE_TOKEN } from './route_setup_manager'; -const defaultRouteManager = new RouteManager(); - -export const uiRoutes = Object.create(defaultRouteManager, { - WAIT_FOR_URL_CHANGE_TOKEN: { - value: WAIT_FOR_URL_CHANGE_TOKEN, - }, - - enable: { - value() { - uiModules - .get('kibana', ['ngRoute']) - .config(defaultRouteManager.config) - .run(defaultRouteManager.run); - }, - }, -}); diff --git a/src/legacy/ui/public/routes/work_queue.js b/src/legacy/ui/public/routes/work_queue.js deleted file mode 100644 index 5c4c83663c590..0000000000000 --- a/src/legacy/ui/public/routes/work_queue.js +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -export function WorkQueue() { - const q = this; - - const work = []; - const fullDefers = []; - - q.limit = 0; - Object.defineProperty(q, 'length', { - get: function () { - return work.length; - }, - }); - - const resolve = function (defers) { - return defers.splice(0).map(function (defer) { - return defer.resolve(); - }); - }; - - const checkIfFull = function () { - if (work.length >= q.limit && fullDefers.length) { - resolve(fullDefers); - } - }; - - q.resolveWhenFull = function (defer) { - fullDefers.push(defer); - checkIfFull(); - }; - - q.doWork = function () { - const resps = resolve(work); - checkIfFull(); - return resps; - }; - - q.empty = function () { - work.splice(0); - checkIfFull(); - }; - - q.push = function (defer) { - work.push(defer); - checkIfFull(); - }; -} diff --git a/src/legacy/ui/public/routes/wrap_route_with_prep.js b/src/legacy/ui/public/routes/wrap_route_with_prep.js deleted file mode 100644 index e9ed33148d9ac..0000000000000 --- a/src/legacy/ui/public/routes/wrap_route_with_prep.js +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import angular from 'angular'; -import _ from 'lodash'; -import { createDefer } from 'ui/promises'; -import { WorkQueue } from './work_queue'; - -export function wrapRouteWithPrep(route, setup) { - if (!route.resolve && route.redirectTo) return; - - const userWork = new WorkQueue(); - // the point at which we will consider the queue "full" - userWork.limit = _.keys(route.resolve).length; - - const resolve = { - __prep__: function ($injector) { - return $injector.invoke(setup.doWork, setup, { userWork }); - }, - }; - - // send each user resolve to the userWork queue, which will prevent it from running before the - // prep is complete - _.forOwn(route.resolve || {}, function (expr, name) { - resolve[name] = function ($injector, Promise) { - const defer = createDefer(Promise); - userWork.push(defer); - return defer.promise.then(function () { - return $injector[angular.isString(expr) ? 'get' : 'invoke'](expr); - }); - }; - }); - - // we're copied everything over so now overwrite - route.resolve = resolve; -} diff --git a/src/legacy/ui/public/state_management/__tests__/app_state.js b/src/legacy/ui/public/state_management/__tests__/app_state.js deleted file mode 100644 index c47fa3bb0417f..0000000000000 --- a/src/legacy/ui/public/state_management/__tests__/app_state.js +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
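To sketch how `wrapRouteWithPrep` and `WorkQueue` above cooperate, a small driver; it assumes the `WorkQueue` constructor from the removed file and uses a local `deferred()` helper in place of `createDefer(Promise)`:

```js
// Local stand-in for the removed createDefer(Promise).
const deferred = () => {
  let resolve;
  let reject;
  const promise = new Promise((res, rej) => {
    resolve = res;
    reject = rej;
  });
  return { promise, resolve, reject };
};

const userWork = new WorkQueue(); // constructor from the removed work_queue.js
userWork.limit = 1; // this example has a single route resolve

// doWork()-style sequencing: wait until every resolve is parked, then flush.
const whenFull = deferred();
userWork.resolveWhenFull(whenFull);
whenFull.promise.then(() => userWork.doWork());

// wrapRouteWithPrep-style parking: the real resolve runs only after the flush.
const parked = deferred();
userWork.push(parked); // reaches the limit, so whenFull resolves
parked.promise.then(() => console.log('route resolve runs now'));
```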
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import { AppStateProvider } from '../app_state'; - -describe('State Management', function () { - let $rootScope; - let AppState; - - beforeEach(ngMock.module('kibana')); - beforeEach( - ngMock.inject(function (_$rootScope_, _$location_, Private) { - $rootScope = _$rootScope_; - AppState = Private(AppStateProvider); - }) - ); - - describe('App State', function () { - let appState; - - beforeEach(function () { - appState = new AppState(); - }); - - it('should have _urlParam of _a', function () { - expect(appState).to.have.property('_urlParam'); - expect(appState._urlParam).to.equal('_a'); - }); - - it('should use passed in params', function () { - const params = { - test: true, - mock: false, - }; - - appState = new AppState(params); - expect(appState).to.have.property('_defaults'); - - Object.keys(params).forEach(function (key) { - expect(appState._defaults).to.have.property(key); - expect(appState._defaults[key]).to.equal(params[key]); - }); - }); - - it('should have a destroy method', function () { - expect(appState).to.have.property('destroy'); - }); - - it('should be destroyed on $routeChangeStart', function () { - const destroySpy = sinon.spy(appState, 'destroy'); - - $rootScope.$emit('$routeChangeStart'); - - expect(destroySpy.callCount).to.be(1); - }); - }); -}); diff --git a/src/legacy/ui/public/state_management/__tests__/config_provider.js b/src/legacy/ui/public/state_management/__tests__/config_provider.js deleted file mode 100644 index 9f756bc51dcb0..0000000000000 --- a/src/legacy/ui/public/state_management/__tests__/config_provider.js +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import '../config_provider'; - -describe('State Management Config', function () { - let stateManagementConfig; - - describe('is enabled', () => { - beforeEach(ngMock.module('kibana')); - beforeEach( - ngMock.inject(function (_stateManagementConfig_) { - stateManagementConfig = _stateManagementConfig_; - }) - ); - - it('should be enabled by default', () => { - expect(stateManagementConfig.enabled).to.be(true); - }); - }); - - describe('can be disabled', () => { - beforeEach( - ngMock.module('kibana', function (stateManagementConfigProvider) { - stateManagementConfigProvider.disable(); - }) - ); - - beforeEach( - ngMock.inject(function (_stateManagementConfig_) { - stateManagementConfig = _stateManagementConfig_; - }) - ); - - it('is disabled by config', () => { - expect(stateManagementConfig.enabled).to.be(false); - }); - }); -}); diff --git a/src/legacy/ui/public/state_management/__tests__/global_state.js b/src/legacy/ui/public/state_management/__tests__/global_state.js deleted file mode 100644 index e9dae5880a8d1..0000000000000 --- a/src/legacy/ui/public/state_management/__tests__/global_state.js +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import '../global_state'; - -describe('State Management', function () { - let $location; - let state; - - beforeEach(ngMock.module('kibana')); - beforeEach( - ngMock.inject(function (_$location_, globalState) { - $location = _$location_; - state = globalState; - }) - ); - - describe('Global State', function () { - it('should use previous state when not in URL', function () { - // set satte via URL - $location.search({ _g: '(foo:(bar:baz))' }); - state.fetch(); - expect(state.toObject()).to.eql({ foo: { bar: 'baz' } }); - - $location.search({ _g: '(fizz:buzz)' }); - state.fetch(); - expect(state.toObject()).to.eql({ fizz: 'buzz' }); - - $location.search({}); - state.fetch(); - expect(state.toObject()).to.eql({ fizz: 'buzz' }); - }); - }); -}); diff --git a/src/legacy/ui/public/state_management/__tests__/state.js b/src/legacy/ui/public/state_management/__tests__/state.js deleted file mode 100644 index b6c705e814509..0000000000000 --- a/src/legacy/ui/public/state_management/__tests__/state.js +++ /dev/null @@ -1,393 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import { encode as encodeRison } from 'rison-node'; -import uiRoutes from 'ui/routes'; -import '../../private'; -import { toastNotifications } from '../../notify'; -import * as FatalErrorNS from '../../notify/fatal_error'; -import { StateProvider } from '../state'; -import { - unhashQuery, - createStateHash, - isStateHash, - HashedItemStore, -} from '../../../../../plugins/kibana_utils/public'; -import { StubBrowserStorage } from 'test_utils/stub_browser_storage'; -import { EventsProvider } from '../../events'; - -describe('State Management', () => { - const sandbox = sinon.createSandbox(); - afterEach(() => sandbox.restore()); - - uiRoutes.enable(); - - describe('Enabled', () => { - let $rootScope; - let $location; - let Events; - let setup; - - beforeEach(ngMock.module('kibana')); - beforeEach( - ngMock.inject(function (_$rootScope_, _$location_, Private, config) { - const State = Private(StateProvider); - $location = _$location_; - $rootScope = _$rootScope_; - Events = Private(EventsProvider); - - setup = (opts) => { - const { param, initial, storeInHash } = opts || {}; - sinon.stub(config, 'get').withArgs('state:storeInSessionStorage').returns(!!storeInHash); - const store = new StubBrowserStorage(); - const hashedItemStore = new HashedItemStore(store); - const state = new State(param, initial, hashedItemStore); - - const getUnhashedSearch = () => unhashQuery($location.search()); - - return { store, hashedItemStore, state, getUnhashedSearch }; - }; - }) - ); - - describe('Provider', () => { - it('should reset the state to the defaults', () => { - const { state, getUnhashedSearch } = setup({ initial: { message: ['test'] } }); - state.reset(); - const search = getUnhashedSearch(state); - expect(search).to.have.property('_s'); - expect(search._s).to.equal('(message:!(test))'); - expect(state.message).to.eql(['test']); - }); - - it('should apply the defaults upon initialization', () => { - const { state } = setup({ initial: { message: 'test' } }); - expect(state).to.have.property('message', 'test'); - }); - - it('should inherit from Events', () => { - const { state } = setup(); - expect(state).to.be.an(Events); - }); - - it('should emit an event if reset with changes', (done) => { - const { state } = setup({ initial: { message: ['test'] } }); - state.on('reset_with_changes', (keys) => { - expect(keys).to.eql(['message']); - done(); - }); - state.save(); - state.message = 'foo'; - state.reset(); - $rootScope.$apply(); - }); - - it('should not emit an event if reset without changes', () => { - const { state } = setup({ initial: { message: 'test' } }); - state.on('reset_with_changes', () => { - expect().fail(); - }); - state.save(); - state.message = 'test'; - state.reset(); - $rootScope.$apply(); - }); - }); - - describe('Search', () => { - it('should save to $location.search()', () => { - const { state, getUnhashedSearch } = setup({ initial: { test: 'foo' } }); - state.save(); - const search = getUnhashedSearch(state); - expect(search).to.have.property('_s'); - 
expect(search._s).to.equal('(test:foo)'); - }); - - it('should emit an event if changes are saved', (done) => { - const { state, getUnhashedSearch } = setup(); - state.on('save_with_changes', (keys) => { - expect(keys).to.eql(['test']); - done(); - }); - state.test = 'foo'; - state.save(); - getUnhashedSearch(state); - $rootScope.$apply(); - }); - }); - - describe('Fetch', () => { - it('should emit an event if changes are fetched', (done) => { - const { state } = setup(); - state.on('fetch_with_changes', (keys) => { - expect(keys).to.eql(['foo']); - done(); - }); - $location.search({ _s: '(foo:bar)' }); - state.fetch(); - expect(state).to.have.property('foo', 'bar'); - $rootScope.$apply(); - }); - - it('should have events that attach to scope', (done) => { - const { state } = setup(); - state.on('test', (message) => { - expect(message).to.equal('foo'); - done(); - }); - state.emit('test', 'foo'); - $rootScope.$apply(); - }); - - it('should fire listeners for #onUpdate() on #fetch()', (done) => { - const { state } = setup(); - state.on('fetch_with_changes', (keys) => { - expect(keys).to.eql(['foo']); - done(); - }); - $location.search({ _s: '(foo:bar)' }); - state.fetch(); - expect(state).to.have.property('foo', 'bar'); - $rootScope.$apply(); - }); - - it('should apply defaults to fetches', () => { - const { state } = setup({ initial: { message: 'test' } }); - $location.search({ _s: '(foo:bar)' }); - state.fetch(); - expect(state).to.have.property('foo', 'bar'); - expect(state).to.have.property('message', 'test'); - }); - - it('should call fetch when $routeUpdate is fired on $rootScope', () => { - const { state } = setup(); - const spy = sinon.spy(state, 'fetch'); - $rootScope.$emit('$routeUpdate', 'test'); - sinon.assert.calledOnce(spy); - }); - - it('should clear state when missing form URL', () => { - let stateObj; - const { state } = setup(); - - // set state via URL - $location.search({ _s: '(foo:(bar:baz))' }); - state.fetch(); - stateObj = state.toObject(); - expect(stateObj).to.eql({ foo: { bar: 'baz' } }); - - // ensure changing URL changes state - $location.search({ _s: '(one:two)' }); - state.fetch(); - stateObj = state.toObject(); - expect(stateObj).to.eql({ one: 'two' }); - - // remove search, state should be empty - $location.search({}); - state.fetch(); - stateObj = state.toObject(); - expect(stateObj).to.eql({}); - }); - - it('should clear state when it is invalid', () => { - let stateObj; - const { state } = setup(); - - $location.search({ _s: '' }); - state.fetch(); - stateObj = state.toObject(); - expect(stateObj).to.eql({}); - - $location.search({ _s: '!n' }); - state.fetch(); - stateObj = state.toObject(); - expect(stateObj).to.eql({}); - - $location.search({ _s: 'alert(1)' }); - state.fetch(); - stateObj = state.toObject(); - expect(stateObj).to.eql({}); - }); - - it('does not replace the state value on read', () => { - const { state } = setup(); - sinon.stub($location, 'search').callsFake((newSearch) => { - if (newSearch) { - return $location; - } else { - return { - [state.getQueryParamName()]: '(a:1)', - }; - } - }); - const replaceStub = sinon.stub($location, 'replace').returns($location); - - state.fetch(); - sinon.assert.notCalled(replaceStub); - }); - }); - - describe('Hashing', () => { - it('stores state values in a hashedItemStore, writing the hash to the url', () => { - const { state, hashedItemStore } = setup({ storeInHash: true }); - state.foo = 'bar'; - state.save(); - const urlVal = $location.search()[state.getQueryParamName()]; - - 
expect(isStateHash(urlVal)).to.be(true); - expect(hashedItemStore.getItem(urlVal)).to.eql(JSON.stringify({ foo: 'bar' })); - }); - - it('should replace rison in the URL with a hash', () => { - const { state, hashedItemStore } = setup({ storeInHash: true }); - const obj = { foo: { bar: 'baz' } }; - const rison = encodeRison(obj); - - $location.search({ _s: rison }); - state.fetch(); - - const urlVal = $location.search()._s; - expect(urlVal).to.not.be(rison); - expect(isStateHash(urlVal)).to.be(true); - expect(hashedItemStore.getItem(urlVal)).to.eql(JSON.stringify(obj)); - }); - - describe('error handling', () => { - it('notifies the user when a hash value does not map to a stored value', () => { - // Ideally, state.js shouldn't be tightly coupled to toastNotifications. Instead, it - // should notify its consumer of this error state and the consumer should be responsible - // for notifying the user of the error. This test verifies the side effect of the error - // until we can remove this coupling. - - // Clear existing toasts. - toastNotifications.list.splice(0); - - const { state } = setup({ storeInHash: true }); - const search = $location.search(); - const badHash = createStateHash('{"a": "b"}', () => null); - - search[state.getQueryParamName()] = badHash; - $location.search(search); - - expect(toastNotifications.list).to.have.length(0); - state.fetch(); - expect(toastNotifications.list).to.have.length(1); - expect(toastNotifications.list[0].title).to.match(/use the share functionality/i); - }); - - it.skip('triggers fatal error linking to github when setting item fails', () => { - // NOTE: this test needs to be written in jest and removed from the browser ones - // More info could be read in the opened issue: - // https://github.com/elastic/kibana/issues/22751 - const { state, hashedItemStore } = setup({ storeInHash: true }); - const fatalErrorStub = sandbox.stub(); - Object.defineProperty(FatalErrorNS, 'fatalError', { - writable: true, - value: fatalErrorStub, - }); - - sandbox.stub(hashedItemStore, 'setItem').returns(false); - state.toQueryParam(); - sinon.assert.calledOnce(fatalErrorStub); - sinon.assert.calledWith( - fatalErrorStub, - sinon.match((error) => error instanceof Error && error.message.includes('github.com')) - ); - }); - - it('translateHashToRison should gracefully fallback if parameter can not be parsed', () => { - const { state, hashedItemStore } = setup({ storeInHash: false }); - - expect(state.translateHashToRison('(a:b)')).to.be('(a:b)'); - expect(state.translateHashToRison('')).to.be(''); - - const existingHash = createStateHash('{"a": "b"}', () => null); - hashedItemStore.setItem(existingHash, '{"a": "b"}'); - - const nonExistingHash = createStateHash('{"b": "c"}', () => null); - - expect(state.translateHashToRison(existingHash)).to.be('(a:b)'); - expect(state.translateHashToRison(nonExistingHash)).to.be('!n'); - }); - }); - }); - }); - - describe('Disabled with persisted state', () => { - let state; - let $location; - let $rootScope; - const stateParam = '_config_test'; - - const getLocationState = () => { - const search = $location.search(); - return search[stateParam]; - }; - - beforeEach( - ngMock.module('kibana', function (stateManagementConfigProvider) { - stateManagementConfigProvider.disable(); - }) - ); - beforeEach( - ngMock.inject(function (_$rootScope_, _$location_, Private, config) { - const State = Private(StateProvider); - $location = _$location_; - $rootScope = _$rootScope_; - - sinon.stub(config, 
'get').withArgs('state:storeInSessionStorage').returns(false); - - class MockPersistedState extends State { - _persistAcrossApps = true; - } - - MockPersistedState.prototype._persistAcrossApps = true; - - state = new MockPersistedState(stateParam); - }) - ); - - describe('changing state', () => { - const methods = ['save', 'replace', 'reset']; - - methods.forEach((method) => { - it(`${method} should not change the URL`, () => { - $location.search({ _s: '(foo:bar)' }); - state[method](); - $rootScope.$apply(); - expect(getLocationState()).to.be(undefined); - }); - }); - }); - - describe('reading state', () => { - it('should not change the URL', () => { - const saveSpy = sinon.spy(state, 'save'); - $location.search({ _s: '(foo:bar)' }); - state.fetch(); - $rootScope.$apply(); - sinon.assert.notCalled(saveSpy); - expect(getLocationState()).to.be(undefined); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/state_management/__tests__/state_monitor_factory.js b/src/legacy/ui/public/state_management/__tests__/state_monitor_factory.js deleted file mode 100644 index dc00d4e05e82f..0000000000000 --- a/src/legacy/ui/public/state_management/__tests__/state_monitor_factory.js +++ /dev/null @@ -1,265 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import expect from '@kbn/expect'; -import sinon from 'sinon'; -import { EventEmitter } from 'events'; -import { cloneDeep } from 'lodash'; -import { stateMonitorFactory } from '../state_monitor_factory'; - -describe('stateMonitorFactory', function () { - const noop = () => {}; - const eventTypes = ['save_with_changes', 'reset_with_changes', 'fetch_with_changes']; - - let mockState; - - function setState(mockState, obj, emit = true) { - mockState.toJSON = () => cloneDeep(obj); - if (emit) mockState.emit(eventTypes[0]); - } - - function createMockState(state = {}) { - const mockState = new EventEmitter(); - setState(mockState, state, false); - return mockState; - } - - beforeEach(() => { - mockState = createMockState({}); - }); - - it('should have a create method', function () { - expect(stateMonitorFactory).to.have.property('create'); - expect(stateMonitorFactory.create).to.be.a('function'); - }); - - describe('factory creation', function () { - it('should not call onChange with only the state', function () { - const monitor = stateMonitorFactory.create(mockState); - const changeStub = sinon.stub(); - monitor.onChange(changeStub); - sinon.assert.notCalled(changeStub); - }); - - it('should not call onChange with matching defaultState', function () { - const monitor = stateMonitorFactory.create(mockState, {}); - const changeStub = sinon.stub(); - monitor.onChange(changeStub); - sinon.assert.notCalled(changeStub); - }); - - it('should call onChange with differing defaultState', function () { - const monitor = stateMonitorFactory.create(mockState, { test: true }); - const changeStub = sinon.stub(); - monitor.onChange(changeStub); - sinon.assert.calledOnce(changeStub); - }); - }); - - describe('instance', function () { - let monitor; - - beforeEach(() => { - monitor = stateMonitorFactory.create(mockState); - }); - - describe('onChange', function () { - it('should throw if not given a handler function', function () { - const fn = () => monitor.onChange('not a function'); - expect(fn).to.throwException(/must be a function/); - }); - - eventTypes.forEach((eventType) => { - describe(`when ${eventType} is emitted`, function () { - let handlerFn; - - beforeEach(() => { - handlerFn = sinon.stub(); - monitor.onChange(handlerFn); - sinon.assert.notCalled(handlerFn); - }); - - it('should get called', function () { - mockState.emit(eventType); - sinon.assert.calledOnce(handlerFn); - }); - - it('should be given the state status', function () { - mockState.emit(eventType); - const args = handlerFn.firstCall.args; - expect(args[0]).to.be.an('object'); - }); - - it('should be given the event type', function () { - mockState.emit(eventType); - const args = handlerFn.firstCall.args; - expect(args[1]).to.equal(eventType); - }); - - it('should be given the changed keys', function () { - const keys = ['one', 'two', 'three']; - mockState.emit(eventType, keys); - const args = handlerFn.firstCall.args; - expect(args[2]).to.equal(keys); - }); - }); - }); - }); - - describe('ignoreProps', function () { - it('should not set status to dirty when ignored properties change', function () { - let status; - const mockState = createMockState({ messages: { world: 'hello', foo: 'bar' } }); - const monitor = stateMonitorFactory.create(mockState); - const changeStub = sinon.stub(); - monitor.ignoreProps('messages.world'); - monitor.onChange(changeStub); - sinon.assert.notCalled(changeStub); - - // update the ignored state prop - setState(mockState, { messages: { world: 'howdy', foo: 'bar' } }); - 
sinon.assert.calledOnce(changeStub); - status = changeStub.firstCall.args[0]; - expect(status).to.have.property('clean', true); - expect(status).to.have.property('dirty', false); - - // update a prop that is not ignored - setState(mockState, { messages: { world: 'howdy', foo: 'baz' } }); - sinon.assert.calledTwice(changeStub); - status = changeStub.secondCall.args[0]; - expect(status).to.have.property('clean', false); - expect(status).to.have.property('dirty', true); - }); - }); - - describe('setInitialState', function () { - let changeStub; - - beforeEach(() => { - changeStub = sinon.stub(); - monitor.onChange(changeStub); - sinon.assert.notCalled(changeStub); - }); - - it('should throw if no state is provided', function () { - const fn = () => monitor.setInitialState(); - expect(fn).to.throwException(/must be an object/); - }); - - it('should throw if given the wrong type', function () { - const fn = () => monitor.setInitialState([]); - expect(fn).to.throwException(/must be an object/); - }); - - it('should trigger the onChange handler', function () { - monitor.setInitialState({ new: 'state' }); - sinon.assert.calledOnce(changeStub); - }); - - it('should change the status with differing state', function () { - monitor.setInitialState({ new: 'state' }); - sinon.assert.calledOnce(changeStub); - - const status = changeStub.firstCall.args[0]; - expect(status).to.have.property('clean', false); - expect(status).to.have.property('dirty', true); - }); - - it('should not trigger the onChange handler without state change', function () { - monitor.setInitialState(cloneDeep(mockState.toJSON())); - sinon.assert.notCalled(changeStub); - }); - }); - - describe('status object', function () { - let handlerFn; - - beforeEach(() => { - handlerFn = sinon.stub(); - monitor.onChange(handlerFn); - }); - - it('should be clean by default', function () { - mockState.emit(eventTypes[0]); - const status = handlerFn.firstCall.args[0]; - expect(status).to.have.property('clean', true); - expect(status).to.have.property('dirty', false); - }); - - it('should be dirty when state changes', function () { - setState(mockState, { message: 'i am dirty now' }); - const status = handlerFn.firstCall.args[0]; - expect(status).to.have.property('clean', false); - expect(status).to.have.property('dirty', true); - }); - - it('should be clean when state is reset', function () { - const defaultState = { message: 'i am the original state' }; - const handlerFn = sinon.stub(); - - let status; - - // initial state and monitor setup - const mockState = createMockState(defaultState); - const monitor = stateMonitorFactory.create(mockState); - monitor.onChange(handlerFn); - sinon.assert.notCalled(handlerFn); - - // change the state and emit an event - setState(mockState, { message: 'i am dirty now' }); - sinon.assert.calledOnce(handlerFn); - status = handlerFn.firstCall.args[0]; - expect(status).to.have.property('clean', false); - expect(status).to.have.property('dirty', true); - - // reset the state and emit an event - setState(mockState, defaultState); - sinon.assert.calledTwice(handlerFn); - status = handlerFn.secondCall.args[0]; - expect(status).to.have.property('clean', true); - expect(status).to.have.property('dirty', false); - }); - }); - - describe('destroy', function () { - let stateSpy; - - beforeEach(() => { - stateSpy = sinon.spy(mockState, 'off'); - sinon.assert.notCalled(stateSpy); - }); - - it('should remove the listeners', function () { - monitor.onChange(noop); - monitor.destroy(); - sinon.assert.callCount(stateSpy, 
eventTypes.length); - eventTypes.forEach((eventType) => { - sinon.assert.calledWith(stateSpy, eventType); - }); - }); - - it('should stop the instance from being used any more', function () { - monitor.onChange(noop); - monitor.destroy(); - const fn = () => monitor.onChange(noop); - expect(fn).to.throwException(/has been destroyed/); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/state_management/app_state.d.ts b/src/legacy/ui/public/state_management/app_state.d.ts deleted file mode 100644 index ddd0b90710766..0000000000000 --- a/src/legacy/ui/public/state_management/app_state.d.ts +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -import { State } from './state'; - -export class AppState extends State {} - -export type AppStateClass< - TAppState extends AppState = AppState, - TDefaults = { [key: string]: any } -> = new (defaults: TDefaults) => TAppState; diff --git a/src/legacy/ui/public/state_management/app_state.js b/src/legacy/ui/public/state_management/app_state.js deleted file mode 100644 index ec680d163b9da..0000000000000 --- a/src/legacy/ui/public/state_management/app_state.js +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * @name AppState - * - * @extends State - * - * @description Inherits State, which inherits Events. This class seems to be - * concerned with mapping "props" to PersistedState instances, and surfacing the - * ability to destroy those mappings. - */ - -import { uiModules } from '../modules'; -import { StateProvider } from './state'; -import { PersistedState } from '../../../../plugins/visualizations/public'; -import { createLegacyClass } from '../utils/legacy_class'; - -const urlParam = '_a'; - -export function AppStateProvider(Private, $location) { - const State = Private(StateProvider); - - let persistedStates; - let eventUnsubscribers; - - createLegacyClass(AppState).inherits(State); - function AppState(defaults) { - // Initialize persistedStates. 
This object maps "prop" names to - // PersistedState instances. These are used to make properties "stateful". - persistedStates = {}; - - // Initialize eventUnsubscribers. These will be called in `destroy`, to - // remove handlers for the 'change' and 'fetch_with_changes' events which - // are dispatched via the rootScope. - eventUnsubscribers = []; - - AppState.Super.call(this, urlParam, defaults); - AppState.getAppState._set(this); - } - - // if the url param is missing, write it back - AppState.prototype._persistAcrossApps = false; - - AppState.prototype.destroy = function () { - AppState.Super.prototype.destroy.call(this); - AppState.getAppState._set(null); - - eventUnsubscribers.forEach((listener) => listener()); - }; - - /** - * @returns PersistedState instance. - */ - AppState.prototype.makeStateful = function (prop) { - if (persistedStates[prop]) return persistedStates[prop]; - const self = this; - - // set up the ui state - persistedStates[prop] = new PersistedState(); - - // update the app state when the stateful instance changes - const updateOnChange = function () { - const replaceState = false; // TODO: debouncing logic - self[prop] = persistedStates[prop].getChanges(); - // Save state to the URL. - self.save(replaceState); - }; - const handlerOnChange = (method) => persistedStates[prop][method]('change', updateOnChange); - handlerOnChange('on'); - eventUnsubscribers.push(() => handlerOnChange('off')); - - // update the stateful object when the app state changes - const persistOnChange = function (changes) { - if (!changes) return; - - if (changes.indexOf(prop) !== -1) { - persistedStates[prop].set(self[prop]); - } - }; - const handlePersist = (method) => this[method]('fetch_with_changes', persistOnChange); - handlePersist('on'); - eventUnsubscribers.push(() => handlePersist('off')); - - // if the thing we're making stateful has an appState value, write to persisted state - if (self[prop]) persistedStates[prop].setSilent(self[prop]); - - return persistedStates[prop]; - }; - - AppState.getAppState = (function () { - let currentAppState; - - function get() { - return currentAppState; - } - - // Checks to see if the appState might already exist, even if it hasn't been newed up - get.previouslyStored = function () { - const search = $location.search(); - return search[urlParam] ? true : false; - }; - - get._set = function (current) { - currentAppState = current; - }; - - return get; - })(); - - return AppState; -} - -uiModules - .get('kibana/global_state') - .factory('AppState', function (Private) { - return Private(AppStateProvider); - }) - .service('getAppState', function (Private) { - return Private(AppStateProvider).getAppState; - }); diff --git a/src/legacy/ui/public/state_management/config_provider.js b/src/legacy/ui/public/state_management/config_provider.js deleted file mode 100644 index 68de449a57a56..0000000000000 --- a/src/legacy/ui/public/state_management/config_provider.js +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * @name stateManagementConfig - * - * @description Allows apps to configure state management - */ - -import { uiModules } from '../modules'; - -export class StateManagementConfigProvider { - _enabled = true; - - $get(/* inject stuff */) { - return { - enabled: this._enabled, - }; - } - - disable() { - this._enabled = false; - } - - enable() { - this._enabled = true; - } -} - -uiModules - .get('kibana/state_management') - .provider('stateManagementConfig', StateManagementConfigProvider); diff --git a/src/legacy/ui/public/state_management/global_state.d.ts b/src/legacy/ui/public/state_management/global_state.d.ts deleted file mode 100644 index 66a85d88956c7..0000000000000 --- a/src/legacy/ui/public/state_management/global_state.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { State } from './state'; - -export type GlobalState = State; diff --git a/src/legacy/ui/public/state_management/global_state.js b/src/legacy/ui/public/state_management/global_state.js deleted file mode 100644 index 0e8dfe40d5950..0000000000000 --- a/src/legacy/ui/public/state_management/global_state.js +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { StateProvider } from './state'; -import { uiModules } from '../modules'; -import { createLegacyClass } from '../utils/legacy_class'; - -const module = uiModules.get('kibana/global_state'); - -export function GlobalStateProvider(Private) { - const State = Private(StateProvider); - - createLegacyClass(GlobalState).inherits(State); - function GlobalState(defaults) { - GlobalState.Super.call(this, '_g', defaults); - } - - // if the url param is missing, write it back - GlobalState.prototype._persistAcrossApps = true; - - return new GlobalState(); -} - -module.service('globalState', function (Private) { - return Private(GlobalStateProvider); -}); diff --git a/src/legacy/ui/public/state_management/state.d.ts b/src/legacy/ui/public/state_management/state.d.ts deleted file mode 100644 index b31862b681f55..0000000000000 --- a/src/legacy/ui/public/state_management/state.d.ts +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export class State { - [key: string]: any; - translateHashToRison: (stateHashOrRison: string | string[]) => string | string[]; - getQueryParamName: () => string; -} diff --git a/src/legacy/ui/public/state_management/state.js b/src/legacy/ui/public/state_management/state.js deleted file mode 100644 index d91834adb4a79..0000000000000 --- a/src/legacy/ui/public/state_management/state.js +++ /dev/null @@ -1,359 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * @name State - * - * @extends Events - * - * @description Persists generic "state" to and reads it from the URL. 
- */ - -import _ from 'lodash'; -import { i18n } from '@kbn/i18n'; -import angular from 'angular'; -import rison from 'rison-node'; -import { EventsProvider } from '../events'; -import { fatalError, toastNotifications } from '../notify'; -import './config_provider'; -import { createLegacyClass } from '../utils/legacy_class'; -import { - hashedItemStore, - isStateHash, - createStateHash, - applyDiff, -} from '../../../../plugins/kibana_utils/public'; - -export function StateProvider( - Private, - $rootScope, - $location, - stateManagementConfig, - config, - kbnUrl, - $injector -) { - const Events = Private(EventsProvider); - - const isDummyRoute = () => - $injector.has('$route') && - $injector.get('$route').current && - $injector.get('$route').current.outerAngularWrapperRoute; - - createLegacyClass(State).inherits(Events); - function State(urlParam, defaults, _hashedItemStore = hashedItemStore) { - State.Super.call(this); - - this.setDefaults(defaults); - this._urlParam = urlParam || '_s'; - this._hashedItemStore = _hashedItemStore; - - // When the URL updates we need to fetch the values from the URL - this._cleanUpListeners = [ - // partial route update, no app reload - $rootScope.$on('$routeUpdate', () => { - this.fetch(); - }), - - // beginning of full route update, new app will be initialized before - // $routeChangeSuccess or $routeChangeError - $rootScope.$on('$routeChangeStart', () => { - if (!this._persistAcrossApps) { - this.destroy(); - } - }), - - $rootScope.$on('$routeChangeSuccess', () => { - if (this._persistAcrossApps) { - this.fetch(); - } - }), - ]; - - // Initialize the State with fetch - this.fetch(); - } - - State.prototype._readFromURL = function () { - const search = $location.search(); - const urlVal = search[this._urlParam]; - - if (!urlVal) { - return null; - } - - if (isStateHash(urlVal)) { - return this._parseStateHash(urlVal); - } - - let risonEncoded; - let unableToParse; - try { - risonEncoded = rison.decode(urlVal); - } catch (e) { - unableToParse = true; - } - - if (unableToParse) { - toastNotifications.addDanger( - i18n.translate('common.ui.stateManagement.unableToParseUrlErrorMessage', { - defaultMessage: 'Unable to parse URL', - }) - ); - search[this._urlParam] = this.toQueryParam(this._defaults); - $location.search(search).replace(); - } - - if (!risonEncoded) { - return null; - } - - if (this.isHashingEnabled()) { - // RISON can find its way into the URL any number of ways, including the navbar links or - // shared urls with the entire state embedded. These values need to be translated into - // hashes and replaced in the browser history when state-hashing is enabled - search[this._urlParam] = this.toQueryParam(risonEncoded); - $location.search(search).replace(); - } - - return risonEncoded; - }; - - /** - * Fetches the state from the url - * @returns {void} - */ - State.prototype.fetch = function () { - if (!stateManagementConfig.enabled) { - return; - } - - let stash = this._readFromURL(); - - // nothing to read from the url? 
save if ordered to persist, but only if it's not on a wrapper route - if (stash === null) { - if (this._persistAcrossApps) { - return this.save(); - } else { - stash = {}; - } - } - - _.defaults(stash, this._defaults); - // apply diff to state from stash, will change state in place via side effect - const diffResults = applyDiff(this, stash); - - if (!isDummyRoute() && diffResults.keys.length) { - this.emit('fetch_with_changes', diffResults.keys); - } - }; - - /** - * Saves the state to the url - * @returns {void} - */ - State.prototype.save = function (replace) { - if (!stateManagementConfig.enabled) { - return; - } - - if (isDummyRoute()) { - return; - } - - let stash = this._readFromURL(); - const state = this.toObject(); - replace = replace || false; - - if (!stash) { - replace = true; - stash = {}; - } - - // apply diff to state from stash, will change state in place via side effect - const diffResults = applyDiff(stash, _.defaults({}, state, this._defaults)); - - if (diffResults.keys.length) { - this.emit('save_with_changes', diffResults.keys); - } - - // persist the state in the URL - const search = $location.search(); - search[this._urlParam] = this.toQueryParam(state); - if (replace) { - $location.search(search).replace(); - } else { - $location.search(search); - } - }; - - /** - * Calls save with a forced replace - * @returns {void} - */ - State.prototype.replace = function () { - if (!stateManagementConfig.enabled) { - return; - } - - this.save(true); - }; - - /** - * Resets the state to the defaults - * - * @returns {void} - */ - State.prototype.reset = function () { - if (!stateManagementConfig.enabled) { - return; - } - - kbnUrl.removeParam(this.getQueryParamName()); - // apply diff to attributes from defaults, this is side effecting so - // it will change the state in place. - const diffResults = applyDiff(this, this._defaults); - if (diffResults.keys.length) { - this.emit('reset_with_changes', diffResults.keys); - } - this.save(); - }; - - /** - * Cleans up the state object - * @returns {void} - */ - State.prototype.destroy = function () { - this.off(); // removes all listeners - - // Removes the $routeUpdate listener - this._cleanUpListeners.forEach((listener) => listener(this)); - }; - - State.prototype.setDefaults = function (defaults) { - this._defaults = defaults || {}; - }; - - /** - * Parse the state hash to it's unserialized value. Hashes are restored - * to their pre-hashed state. - * - * @param {string} stateHash - state hash value from the query string. - * @return {any} - the stored value, or null if hash does not resolve. - */ - State.prototype._parseStateHash = function (stateHash) { - const json = this._hashedItemStore.getItem(stateHash); - if (json === null) { - toastNotifications.addDanger( - i18n.translate('common.ui.stateManagement.unableToRestoreUrlErrorMessage', { - defaultMessage: - 'Unable to completely restore the URL, be sure to use the share functionality.', - }) - ); - } - - return JSON.parse(json); - }; - - /** - * Lookup the value for a hash and return it's value in rison format or just - * return passed argument if it's not recognized as state hash. - * - * @param {string} stateHashOrRison - either state hash value or rison string. 
- * @return {string} rison - */ - State.prototype.translateHashToRison = function (stateHashOrRison) { - if (isStateHash(stateHashOrRison)) { - return rison.encode(this._parseStateHash(stateHashOrRison)); - } - - return stateHashOrRison; - }; - - State.prototype.isHashingEnabled = function () { - return !!config.get('state:storeInSessionStorage'); - }; - - /** - * Produce the hash version of the state in it's current position - * - * @return {string} - */ - State.prototype.toQueryParam = function (state = this.toObject()) { - if (!this.isHashingEnabled()) { - return rison.encode(state); - } - - // We need to strip out Angular-specific properties. - const json = angular.toJson(state); - const hash = createStateHash(json); - const isItemSet = this._hashedItemStore.setItem(hash, json); - - if (isItemSet) { - return hash; - } - - // If we ran out of space trying to persist the state, notify the user. - const message = i18n.translate( - 'common.ui.stateManagement.unableToStoreHistoryInSessionErrorMessage', - { - defaultMessage: - 'Kibana is unable to store history items in your session ' + - `because it is full and there don't seem to be items any items safe ` + - 'to delete.\n\n' + - 'This can usually be fixed by moving to a fresh tab, but could ' + - 'be caused by a larger issue. If you are seeing this message regularly, ' + - 'please file an issue at {gitHubIssuesUrl}.', - values: { gitHubIssuesUrl: 'https://github.com/elastic/kibana/issues' }, - } - ); - fatalError(new Error(message)); - }; - - /** - * Get the query string parameter name where this state writes and reads - * @return {string} - */ - State.prototype.getQueryParamName = function () { - return this._urlParam; - }; - - /** - * Returns an object with each property name and value corresponding to the entries in this collection - * excluding fields started from '$', '_' and all methods - * - * @return {object} - */ - State.prototype.toObject = function () { - return _.omitBy(this, (value, key) => { - return key.charAt(0) === '$' || key.charAt(0) === '_' || _.isFunction(value); - }); - }; - - /** Alias for method 'toObject' - * - * @obsolete Please use 'toObject' method instead - * @return {object} - */ - State.prototype.toJSON = function () { - return this.toObject(); - }; - - return State; -} diff --git a/src/legacy/ui/public/state_management/state_monitor_factory.ts b/src/legacy/ui/public/state_management/state_monitor_factory.ts deleted file mode 100644 index 968ececfe3be5..0000000000000 --- a/src/legacy/ui/public/state_management/state_monitor_factory.ts +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -import { set } from '@elastic/safer-lodash-set'; -import { cloneDeep, isEqual, isPlainObject } from 'lodash'; -import { State } from './state'; - -export const stateMonitorFactory = { - create: ( - state: State, - customInitialState: TStateDefault - ) => stateMonitor(state, customInitialState), -}; - -interface StateStatus { - clean: boolean; - dirty: boolean; -} - -export interface StateMonitor { - setInitialState: (innerCustomInitialState: TStateDefault) => void; - ignoreProps: (props: string[] | string) => void; - onChange: (callback: ChangeHandlerFn) => StateMonitor; - destroy: () => void; -} - -type ChangeHandlerFn = (status: StateStatus, type: string | null, keys: string[]) => void; - -function stateMonitor( - state: State, - customInitialState: TStateDefault -): StateMonitor { - let destroyed = false; - let ignoredProps: string[] = []; - let changeHandlers: ChangeHandlerFn[] | undefined = []; - let initialState: TStateDefault; - - setInitialState(customInitialState); - - function setInitialState(innerCustomInitialState: TStateDefault) { - // state.toJSON returns a reference, clone so we can mutate it safely - initialState = cloneDeep(innerCustomInitialState) || cloneDeep(state.toJSON()); - } - - function removeIgnoredProps(innerState: TStateDefault) { - ignoredProps.forEach((path) => { - set(innerState, path, true); - }); - return innerState; - } - - function getStatus(): StateStatus { - // state.toJSON returns a reference, clone so we can mutate it safely - const currentState = removeIgnoredProps(cloneDeep(state.toJSON())); - const isClean = isEqual(currentState, initialState); - - return { - clean: isClean, - dirty: !isClean, - }; - } - - function dispatchChange(type: string | null = null, keys: string[] = []) { - const status = getStatus(); - if (!changeHandlers) { - throw new Error('Change handlers is undefined, this object has been destroyed'); - } - changeHandlers.forEach((changeHandler) => { - changeHandler(status, type, keys); - }); - } - - function dispatchFetch(keys: string[]) { - dispatchChange('fetch_with_changes', keys); - } - - function dispatchSave(keys: string[]) { - dispatchChange('save_with_changes', keys); - } - - function dispatchReset(keys: string[]) { - dispatchChange('reset_with_changes', keys); - } - - return { - setInitialState(innerCustomInitialState: TStateDefault) { - if (!isPlainObject(innerCustomInitialState)) { - throw new TypeError('The default state must be an object'); - } - - // check the current status - const previousStatus = getStatus(); - - // update the initialState and apply ignoredProps - setInitialState(innerCustomInitialState); - removeIgnoredProps(initialState); - - // fire the change handler if the status has changed - if (!isEqual(previousStatus, getStatus())) { - dispatchChange(); - } - }, - - ignoreProps(props: string[] | string) { - ignoredProps = ignoredProps.concat(props); - removeIgnoredProps(initialState); - return this; - }, - - onChange(callback: ChangeHandlerFn) { - if (destroyed || !changeHandlers) { - throw new Error('Monitor has been destroyed'); - } - if (typeof callback !== 'function') { - throw new Error('onChange handler must be a function'); - } - - changeHandlers.push(callback); - - // Listen for state events. 
- state.on('fetch_with_changes', dispatchFetch); - state.on('save_with_changes', dispatchSave); - state.on('reset_with_changes', dispatchReset); - - // if the state is already dirty, fire the change handler immediately - const status = getStatus(); - if (status.dirty) { - dispatchChange(); - } - - return this; - }, - - destroy() { - destroyed = true; - changeHandlers = undefined; - state.off('fetch_with_changes', dispatchFetch); - state.off('save_with_changes', dispatchSave); - state.off('reset_with_changes', dispatchReset); - }, - }; -} diff --git a/src/legacy/ui/public/system_api/__tests__/system_api.js b/src/legacy/ui/public/system_api/__tests__/system_api.js deleted file mode 100644 index 816024f13f8b2..0000000000000 --- a/src/legacy/ui/public/system_api/__tests__/system_api.js +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; -import { - addSystemApiHeader, - isSystemApiRequest, -} from '../../../../../plugins/kibana_legacy/public'; - -describe('system_api', () => { - describe('#addSystemApiHeader', () => { - it('adds the correct system API header', () => { - const headers = { - 'kbn-version': '4.6.0', - }; - const newHeaders = addSystemApiHeader(headers); - - expect(newHeaders).to.have.property('kbn-system-request'); - expect(newHeaders['kbn-system-request']).to.be(true); - - expect(newHeaders).to.have.property('kbn-version'); - expect(newHeaders['kbn-version']).to.be('4.6.0'); - }); - }); - - describe('#isSystemApiRequest', () => { - it('returns true for a system HTTP request', () => { - const mockRequest = { - headers: { - 'kbn-system-request': true, - }, - }; - expect(isSystemApiRequest(mockRequest)).to.be(true); - }); - - it('returns true for a legacy system API HTTP request', () => { - const mockRequest = { - headers: { - 'kbn-system-api': true, - }, - }; - expect(isSystemApiRequest(mockRequest)).to.be(true); - }); - - it('returns false for a non-system API HTTP request', () => { - const mockRequest = { - headers: {}, - }; - expect(isSystemApiRequest(mockRequest)).to.be(false); - }); - }); -}); diff --git a/src/legacy/ui/public/system_api/index.js b/src/legacy/ui/public/system_api/index.js deleted file mode 100644 index 6361c0ea6c69a..0000000000000 --- a/src/legacy/ui/public/system_api/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { addSystemApiHeader, isSystemApiRequest } from '../../../../plugins/kibana_legacy/public'; diff --git a/src/legacy/ui/public/timefilter/index.ts b/src/legacy/ui/public/timefilter/index.ts deleted file mode 100644 index 83795c73112be..0000000000000 --- a/src/legacy/ui/public/timefilter/index.ts +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import uiRoutes from 'ui/routes'; -import { npStart } from 'ui/new_platform'; - -import { TimefilterContract, TimeHistoryContract } from '../../../../plugins/data/public'; -import { registerTimefilterWithGlobalState } from './setup_router'; - -export { - getTime, - InputTimeRange, - TimeHistoryContract, - TimefilterContract, -} from '../../../../plugins/data/public'; -export type Timefilter = TimefilterContract; -export type TimeHistory = TimeHistoryContract; -export const timeHistory = npStart.plugins.data.query.timefilter.history; -export const timefilter = npStart.plugins.data.query.timefilter.timefilter; - -uiRoutes.addSetupWork((globalState, $rootScope) => { - return registerTimefilterWithGlobalState(timefilter, globalState, $rootScope); -}); diff --git a/src/legacy/ui/public/timefilter/setup_router.test.js b/src/legacy/ui/public/timefilter/setup_router.test.js deleted file mode 100644 index 2ae9a053ae2db..0000000000000 --- a/src/legacy/ui/public/timefilter/setup_router.test.js +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -import { registerTimefilterWithGlobalState } from './setup_router'; - -jest.mock('../../../../plugins/kibana_legacy/public', () => ({ - subscribeWithScope: jest.fn(), -})); - -describe('registerTimefilterWithGlobalState()', () => { - it('should always use iso8601 strings', async () => { - const setTime = jest.fn(); - const timefilter = { - setTime, - setRefreshInterval: jest.fn(), - getRefreshIntervalUpdate$: jest.fn(), - getTimeUpdate$: jest.fn(), - }; - - const globalState = { - time: { - from: '2017-09-07T20:12:04.011Z', - to: '2017-09-07T20:18:55.733Z', - }, - on: (eventName, callback) => { - callback(); - }, - }; - - const rootScope = { - $on: jest.fn(), - }; - - registerTimefilterWithGlobalState(timefilter, globalState, rootScope); - - expect(setTime.mock.calls.length).toBe(2); - expect(setTime.mock.calls[1][0]).toEqual(globalState.time); - }); -}); diff --git a/src/legacy/ui/public/timefilter/setup_router.ts b/src/legacy/ui/public/timefilter/setup_router.ts deleted file mode 100644 index 7c25c6aa3166e..0000000000000 --- a/src/legacy/ui/public/timefilter/setup_router.ts +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; -import { IScope } from 'angular'; -import moment from 'moment'; -import chrome from 'ui/chrome'; -import { Subscription } from 'rxjs'; -import { fatalError } from 'ui/notify/fatal_error'; -import { subscribeWithScope } from '../../../../plugins/kibana_legacy/public'; -import { - RefreshInterval, - TimeRange, - TimefilterContract, - UI_SETTINGS, -} from '../../../../plugins/data/public'; - -// TODO -// remove everything underneath once globalState is no longer an angular service -// and listener can be registered without angular. -function convertISO8601(stringTime: string): string { - const obj = moment(stringTime, 'YYYY-MM-DDTHH:mm:ss.SSSZ', true); - return obj.isValid() ? obj.toISOString() : stringTime; -} - -export function getTimefilterConfig() { - const settings = chrome.getUiSettingsClient(); - return { - timeDefaults: settings.get('timepicker:timeDefaults'), - refreshIntervalDefaults: settings.get(UI_SETTINGS.TIMEPICKER_REFRESH_INTERVAL_DEFAULTS), - }; -} - -export const registerTimefilterWithGlobalStateFactory = ( - timefilter: TimefilterContract, - globalState: any, - $rootScope: IScope -) => { - // settings have to be re-fetched here, to make sure that settings changed by overrideLocalDefault are taken into account. 
- const config = getTimefilterConfig(); - timefilter.setTime(_.defaults(globalState.time || {}, config.timeDefaults)); - timefilter.setRefreshInterval( - _.defaults(globalState.refreshInterval || {}, config.refreshIntervalDefaults) - ); - - globalState.on('fetch_with_changes', () => { - // clone and default to {} in one - const newTime: TimeRange = _.defaults({}, globalState.time, config.timeDefaults); - const newRefreshInterval: RefreshInterval = _.defaults( - {}, - globalState.refreshInterval, - config.refreshIntervalDefaults - ); - - if (newTime) { - if (newTime.to) newTime.to = convertISO8601(newTime.to); - if (newTime.from) newTime.from = convertISO8601(newTime.from); - } - - timefilter.setTime(newTime); - timefilter.setRefreshInterval(newRefreshInterval); - }); - - const updateGlobalStateWithTime = () => { - globalState.time = timefilter.getTime(); - globalState.refreshInterval = timefilter.getRefreshInterval(); - globalState.save(); - }; - - const subscriptions = new Subscription(); - subscriptions.add( - subscribeWithScope( - $rootScope, - timefilter.getRefreshIntervalUpdate$(), - { - next: updateGlobalStateWithTime, - }, - fatalError - ) - ); - - subscriptions.add( - subscribeWithScope( - $rootScope, - timefilter.getTimeUpdate$(), - { - next: updateGlobalStateWithTime, - }, - fatalError - ) - ); - - $rootScope.$on('$destroy', () => { - subscriptions.unsubscribe(); - }); -}; - -// Currently some parts of Kibana (index patterns, timefilter) rely on addSetupWork in the uiRouter -// and require it to be executed to properly function. -// This function is exposed for applications that do not use uiRoutes like APM -// Kibana issue https://github.com/elastic/kibana/issues/19110 tracks the removal of this dependency on uiRouter -export const registerTimefilterWithGlobalState = _.once(registerTimefilterWithGlobalStateFactory); diff --git a/src/legacy/ui/public/url/__tests__/extract_app_path_and_id.js b/src/legacy/ui/public/url/__tests__/extract_app_path_and_id.js deleted file mode 100644 index 965e8f4bc9f38..0000000000000 --- a/src/legacy/ui/public/url/__tests__/extract_app_path_and_id.js +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import expect from '@kbn/expect'; - -import { extractAppPathAndId } from '../extract_app_path_and_id'; - -describe('extractAppPathAndId', function () { - describe('from an absolute url with a base path', function () { - describe('with a base path', function () { - const basePath = '/gza'; - const absoluteUrl = 'http://www.test.com:5601/gza/app/appId#appPathIsHere?query=here'; - it('extracts app path', function () { - expect(extractAppPathAndId(absoluteUrl, basePath).appPath).to.be( - 'appPathIsHere?query=here' - ); - }); - - it('extracts app id', function () { - expect(extractAppPathAndId(absoluteUrl, basePath).appId).to.be('appId'); - }); - - it('returns an empty object when there is no app path', function () { - const appPathAndId = extractAppPathAndId('http://www.test.com:5601/gza/noapppath'); - expect(appPathAndId.appId).to.be(undefined); - expect(appPathAndId.appPath).to.be(undefined); - }); - }); - - describe('without a base path', function () { - const absoluteUrl = 'http://www.test.com:5601/app/appId#appPathIsHere?query=here'; - it('extracts app path', function () { - expect(extractAppPathAndId(absoluteUrl).appPath).to.be('appPathIsHere?query=here'); - }); - - it('extracts app id', function () { - expect(extractAppPathAndId(absoluteUrl).appId).to.be('appId'); - }); - - it('returns an empty object when there is no app path', function () { - const appPathAndId = extractAppPathAndId('http://www.test.com:5601/noapppath'); - expect(appPathAndId.appId).to.be(undefined); - expect(appPathAndId.appPath).to.be(undefined); - }); - }); - - describe('when appPath is empty', function () { - const absoluteUrl = 'http://www.test.com:5601/app/appId'; - it('extracts app id', function () { - expect(extractAppPathAndId(absoluteUrl).appId).to.be('appId'); - }); - it('extracts empty appPath', function () { - expect(extractAppPathAndId(absoluteUrl).appPath).to.be(''); - }); - }); - }); - - describe('from a root relative url', function () { - describe('with a base path', function () { - const basePath = '/gza'; - const rootRelativePath = '/gza/app/appId#appPathIsHere?query=here'; - it('extracts app path', function () { - expect(extractAppPathAndId(rootRelativePath, basePath).appPath).to.be( - 'appPathIsHere?query=here' - ); - }); - - it('extracts app id', function () { - expect(extractAppPathAndId(rootRelativePath, basePath).appId).to.be('appId'); - }); - - it('returns an empty object when there is no app path', function () { - const appPathAndId = extractAppPathAndId('/gza/notformattedright'); - expect(appPathAndId.appId).to.be(undefined); - expect(appPathAndId.appPath).to.be(undefined); - }); - }); - - describe('without a base path', function () { - const rootRelativePath = '/app/appId#appPathIsHere?query=here'; - it('extracts app path', function () { - expect(extractAppPathAndId(rootRelativePath).appPath).to.be('appPathIsHere?query=here'); - }); - - it('extracts app id', function () { - expect(extractAppPathAndId(rootRelativePath).appId).to.be('appId'); - }); - - it('returns an empty object when there is no app path', function () { - const appPathAndId = extractAppPathAndId('/notformattedright'); - expect(appPathAndId.appId).to.be(undefined); - expect(appPathAndId.appPath).to.be(undefined); - }); - }); - - describe('when appPath is empty', function () { - const rootRelativePath = '/app/appId'; - it('extracts app id', function () { - expect(extractAppPathAndId(rootRelativePath).appId).to.be('appId'); - }); - it('extracts empty appPath', function () { - 
expect(extractAppPathAndId(rootRelativePath).appPath).to.be(''); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/url/__tests__/kibana_parsed_url.js b/src/legacy/ui/public/url/__tests__/kibana_parsed_url.js deleted file mode 100644 index 6ea199c3d22cc..0000000000000 --- a/src/legacy/ui/public/url/__tests__/kibana_parsed_url.js +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; - -import { KibanaParsedUrl } from '../kibana_parsed_url'; - -describe('KibanaParsedUrl', function () { - it('getHashedAppPath', function () { - const kibanaParsedUrl = new KibanaParsedUrl({ - basePath: '/hi', - appId: 'bye', - appPath: 'visualize?hi=there&bye', - }); - expect(kibanaParsedUrl.getHashedAppPath()).to.be('#visualize?hi=there&bye'); - }); - - it('getAppRootPath', function () { - const kibanaParsedUrl = new KibanaParsedUrl({ - basePath: '/hi', - appId: 'appId', - appPath: 'dashboard?edit=123', - }); - expect(kibanaParsedUrl.getAppRootPath()).to.be('/app/appId#dashboard?edit=123'); - }); - - describe('when basePath is specified', function () { - it('getRootRelativePath', function () { - const kibanaParsedUrl = new KibanaParsedUrl({ - basePath: '/base', - appId: 'appId', - appPath: 'visualize?hi=there&bye', - }); - expect(kibanaParsedUrl.getRootRelativePath()).to.be('/base/app/appId#visualize?hi=there&bye'); - }); - - describe('getAbsolutePath', function () { - const protocol = 'http'; - const hostname = 'www.test.com'; - const port = '5601'; - - it('returns the absolute url when there is a port', function () { - const kibanaParsedUrl = new KibanaParsedUrl({ - basePath: '/base', - appId: 'appId', - appPath: 'visualize?hi=there&bye', - hostname, - protocol, - port, - }); - expect(kibanaParsedUrl.getAbsoluteUrl()).to.be( - 'http://www.test.com:5601/base/app/appId#visualize?hi=there&bye' - ); - }); - - it('returns the absolute url when there are no query parameters', function () { - const kibanaParsedUrl = new KibanaParsedUrl({ - basePath: '/base', - appId: 'appId', - appPath: 'visualize', - hostname, - protocol, - }); - expect(kibanaParsedUrl.getAbsoluteUrl()).to.be( - 'http://www.test.com/base/app/appId#visualize' - ); - }); - - it('returns the absolute url when the are query parameters', function () { - const kibanaParsedUrl = new KibanaParsedUrl({ - basePath: '/base', - appId: 'appId', - appPath: 'visualize?hi=bye&tata', - hostname, - protocol, - }); - expect(kibanaParsedUrl.getAbsoluteUrl()).to.be( - 'http://www.test.com/base/app/appId#visualize?hi=bye&tata' - ); - }); - }); - }); - - describe('when basePath is not specified', function () { - it('getRootRelativePath', function () { - const kibanaParsedUrl = new KibanaParsedUrl({ - appId: 'appId', - appPath: 'visualize?hi=there&bye', 
- }); - expect(kibanaParsedUrl.getRootRelativePath()).to.be('/app/appId#visualize?hi=there&bye'); - }); - - describe('getAbsolutePath', function () { - const protocol = 'http'; - const hostname = 'www.test.com'; - const port = '5601'; - - it('returns the absolute url when there is a port', function () { - const kibanaParsedUrl = new KibanaParsedUrl({ - appId: 'appId', - appPath: 'visualize?hi=there&bye', - hostname, - protocol, - port, - }); - expect(kibanaParsedUrl.getAbsoluteUrl()).to.be( - 'http://www.test.com:5601/app/appId#visualize?hi=there&bye' - ); - }); - - it('returns the absolute url when there are no query parameters', function () { - const kibanaParsedUrl = new KibanaParsedUrl({ - appId: 'appId', - appPath: 'visualize', - hostname, - protocol, - }); - expect(kibanaParsedUrl.getAbsoluteUrl()).to.be('http://www.test.com/app/appId#visualize'); - }); - - it('returns the absolute url when there are query parameters', function () { - const kibanaParsedUrl = new KibanaParsedUrl({ - appId: 'appId', - appPath: 'visualize?hi=bye&tata', - hostname, - protocol, - }); - expect(kibanaParsedUrl.getAbsoluteUrl()).to.be( - 'http://www.test.com/app/appId#visualize?hi=bye&tata' - ); - }); - }); - }); - - describe('getGlobalState', function () { - const basePath = '/xyz'; - const appId = 'myApp'; - - it('returns an empty string when the KibanaParsedUrl is in an invalid state', function () { - const url = new KibanaParsedUrl({ basePath }); - expect(url.getGlobalState()).to.be(''); - }); - - it('returns an empty string when there is no global state', function () { - const url = new KibanaParsedUrl({ basePath, appId, appPath: '/hi?notg=something' }); - expect(url.getGlobalState()).to.be(''); - }); - - it('returns the global state when it is the last parameter', function () { - const url = new KibanaParsedUrl({ - basePath, - appId, - appPath: '/hi?notg=something&_g=(thisismyglobalstate)', - }); - expect(url.getGlobalState()).to.be('(thisismyglobalstate)'); - }); - - it('returns the global state when it is the first parameter', function () { - const url = new KibanaParsedUrl({ - basePath, - appId, - appPath: '/hi?_g=(thisismyglobalstate)&hi=bye', - }); - expect(url.getGlobalState()).to.be('(thisismyglobalstate)'); - }); - }); - - describe('setGlobalState', function () { - const basePath = '/xyz'; - const appId = 'myApp'; - - it('does nothing when KibanaParsedUrl is in an invalid state', function () { - const url = new KibanaParsedUrl({ basePath }); - url.setGlobalState('newglobalstate'); - expect(url.getGlobalState()).to.be(''); - }); - - it('clears the global state when setting it to an empty string', function () { - const url = new KibanaParsedUrl({ basePath, appId, appPath: '/hi?_g=globalstate' }); - url.setGlobalState(''); - expect(url.getGlobalState()).to.be(''); - }); - - it('updates the global state when a string is passed in', function () { - const url = new KibanaParsedUrl({ - basePath, - appId, - appPath: '/hi?notg=something&_g=oldstate', - }); - url.setGlobalState('newstate'); - expect(url.getGlobalState()).to.be('newstate'); - }); - - it('adds the global state parameters if it did not exist before', function () { - const url = new KibanaParsedUrl({ basePath, appId, appPath: '/hi' }); - url.setGlobalState('newstate'); - expect(url.getGlobalState()).to.be('newstate'); - expect(url.appPath).to.be('/hi?_g=newstate'); - }); - }); -}); diff --git a/src/legacy/ui/public/url/__tests__/prepend_path.js b/src/legacy/ui/public/url/__tests__/prepend_path.js deleted file mode 100644 index 
36991b77553e4..0000000000000 --- a/src/legacy/ui/public/url/__tests__/prepend_path.js +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; - -import { prependPath } from '../prepend_path'; - -describe('url prependPath', function () { - describe('returns the relative path unchanged', function () { - it('if it is null', function () { - expect(prependPath(null, 'kittens')).to.be(null); - }); - - it('if it is undefined', function () { - expect(prependPath(undefined, 'kittens')).to.be(undefined); - }); - - it('if it is an absolute url', function () { - expect(prependPath('http://www.hithere.com/howareyou', 'welcome')).to.be( - 'http://www.hithere.com/howareyou' - ); - }); - - it('if it does not start with a /', function () { - expect(prependPath('are/so/cool', 'cats')).to.be('are/so/cool'); - }); - - it('when new path is empty', function () { - expect(prependPath('/are/so/cool', '')).to.be('/are/so/cool'); - }); - - it('when it is only a slash and new path is empty', function () { - expect(prependPath('/', '')).to.be('/'); - }); - }); - - describe('returns an updated relative path', function () { - it('when it starts with a slash', function () { - expect(prependPath('/are/so/cool', 'dinosaurs')).to.be('dinosaurs/are/so/cool'); - }); - - it('when new path starts with a slash', function () { - expect(prependPath('/are/so/cool', '/fish')).to.be('/fish/are/so/cool'); - }); - - it('with two slashes if new path is a slash', function () { - expect(prependPath('/are/so/cool', '/')).to.be('//are/so/cool'); - }); - - it('when there is a slash on the end', function () { - expect(prependPath('/are/delicious/', 'lollipops')).to.be('lollipops/are/delicious/'); - }); - - it('when pathname that ends with a file', function () { - expect(prependPath('/are/delicious/index.html', 'donuts')).to.be( - 'donuts/are/delicious/index.html' - ); - }); - - it('when it is only a slash', function () { - expect(prependPath('/', 'kittens')).to.be('kittens/'); - }); - }); -}); diff --git a/src/legacy/ui/public/url/__tests__/url.js b/src/legacy/ui/public/url/__tests__/url.js deleted file mode 100644 index 8b173482e1bb4..0000000000000 --- a/src/legacy/ui/public/url/__tests__/url.js +++ /dev/null @@ -1,562 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import faker from 'faker'; -import _ from 'lodash'; -import { AppStateProvider } from '../../state_management/app_state'; -import '..'; - -// global vars, injected and mocked in init() -let kbnUrl; -let $route; -let $location; -let $rootScope; -let appState; - -class StubAppState { - constructor() { - this.getQueryParamName = () => '_a'; - this.toQueryParam = () => 'stateQueryParam'; - this.destroy = sinon.stub(); - } -} - -function init() { - ngMock.module('kibana/url', 'kibana', function ($provide, PrivateProvider) { - $provide.service('$route', function () { - return { - reload: _.noop, - }; - }); - - appState = new StubAppState(); - PrivateProvider.swap(AppStateProvider, ($decorate) => { - const AppState = $decorate(); - AppState.getAppState = () => appState; - return AppState; - }); - }); - - ngMock.inject(function ($injector) { - $route = $injector.get('$route'); - $location = $injector.get('$location'); - $rootScope = $injector.get('$rootScope'); - kbnUrl = $injector.get('kbnUrl'); - }); -} - -describe('kbnUrl', function () { - beforeEach(function () { - init(); - }); - - describe('forcing reload', function () { - it('schedules a listener for $locationChangeSuccess on the $rootScope', function () { - $location.url('/url'); - $route.current = { - $$route: { - regex: /.*/, - }, - }; - - sinon.stub($rootScope, '$on'); - - expect($rootScope.$on.callCount).to.be(0); - kbnUrl.change('/url'); - sinon.assert.calledOnce(appState.destroy); - expect($rootScope.$on.callCount).to.be(1); - expect($rootScope.$on.firstCall.args[0]).to.be('$locationChangeSuccess'); - }); - - it('handler unbinds the listener and calls reload', function () { - $location.url('/url'); - $route.current = { - $$route: { - regex: /.*/, - }, - }; - - const unbind = sinon.stub(); - sinon.stub($rootScope, '$on').returns(unbind); - $route.reload = sinon.stub(); - - expect($rootScope.$on.callCount).to.be(0); - kbnUrl.change('/url'); - expect($rootScope.$on.callCount).to.be(1); - - const handler = $rootScope.$on.firstCall.args[1]; - handler(); - expect(unbind.callCount).to.be(1); - expect($route.reload.callCount).to.be(1); - }); - - it('reloads requested before the first are ignored', function () { - $location.url('/url'); - $route.current = { - $$route: { - regex: /.*/, - }, - }; - $route.reload = sinon.stub(); - - sinon.stub($rootScope, '$on').returns(sinon.stub()); - - expect($rootScope.$on.callCount).to.be(0); - kbnUrl.change('/url'); - expect($rootScope.$on.callCount).to.be(1); - - // don't call the first handler - - kbnUrl.change('/url'); - expect($rootScope.$on.callCount).to.be(1); - }); - - it('one reload can happen once the first has completed', function () { - $location.url('/url'); - $route.current = { - $$route: { - regex: /.*/, - }, - }; - $route.reload = sinon.stub(); - - sinon.stub($rootScope, '$on').returns(sinon.stub()); - - expect($rootScope.$on.callCount).to.be(0); - kbnUrl.change('/url'); - expect($rootScope.$on.callCount).to.be(1); - - // call the first handler - 
$rootScope.$on.firstCall.args[1](); - expect($route.reload.callCount).to.be(1); - - expect($rootScope.$on.callCount).to.be(1); - kbnUrl.change('/url'); - expect($rootScope.$on.callCount).to.be(2); - }); - }); - - describe('remove', function () { - it('removes a parameter with a value from the url', function () { - $location.url('/myurl?exist&WithAParamToRemove=2&anothershouldexist=5'); - kbnUrl.removeParam('WithAParamToRemove'); - expect($location.url()).to.be('/myurl?exist&anothershouldexist=5'); - }); - - it('removes a parameter with no value from the url', function () { - $location.url('/myurl?removeme&hi=5'); - kbnUrl.removeParam('removeme'); - expect($location.url()).to.be('/myurl?hi=5'); - }); - - it('is noop if the given parameter doesn\t exist in the url', function () { - $location.url('/myurl?hi&bye'); - kbnUrl.removeParam('noexist'); - expect($location.url()).to.be('/myurl?hi&bye'); - }); - - it('is noop if given empty string param', function () { - $location.url('/myurl?hi&bye'); - kbnUrl.removeParam(''); - expect($location.url()).to.be('/myurl?hi&bye'); - }); - }); - - describe('change', function () { - it('should set $location.url', function () { - sinon.stub($location, 'url'); - - expect($location.url.callCount).to.be(0); - kbnUrl.change('/some-url'); - expect($location.url.callCount).to.be(1); - }); - - it('should uri encode replaced params', function () { - const url = '/some/path/'; - const params = { replace: faker.Lorem.words(3).join(' ') }; - const check = encodeURIComponent(params.replace); - sinon.stub($location, 'url'); - - kbnUrl.change(url + '{{replace}}', params); - - expect($location.url.firstCall.args[0]).to.be(url + check); - }); - - it('should parse angular expression in substitutions and uri encode the results', function () { - // build url by piecing together these parts - const urlParts = ['/', '/', '?', '&', '#']; - // make sure it can parse templates with weird spacing - const wrappers = [ - ['{{', '}}'], - ['{{ ', ' }}'], - ['{{', ' }}'], - ['{{ ', '}}'], - ['{{ ', ' }}'], - ]; - // make sure filters are evaluated via angular expressions - const objIndex = 4; // used to case one replace as an object - const filters = ['', 'uppercase', '', 'uppercase', '']; - - // the words (template keys) used must all be unique - const words = _.uniq(faker.Lorem.words(10)) - .slice(0, urlParts.length) - .map(function (word, i) { - if (filters[i].length) { - return word + '|' + filters[i]; - } - return word; - }); - - const replacements = faker.Lorem.words(urlParts.length).map(function (word, i) { - // make selected replacement into an object - if (i === objIndex) { - return { replace: word }; - } - - return word; - }); - - // build the url and test url - let url = ''; - let testUrl = ''; - urlParts.forEach(function (part, i) { - url += part + wrappers[i][0] + words[i] + wrappers[i][1]; - const locals = {}; - locals[words[i].split('|')[0]] = replacements[i]; - testUrl += part + encodeURIComponent($rootScope.$eval(words[i], locals)); - }); - - // create the locals replacement object - const params = {}; - replacements.forEach(function (replacement, i) { - const word = words[i].split('|')[0]; - params[word] = replacement; - }); - - sinon.stub($location, 'url'); - - kbnUrl.change(url, params); - - expect($location.url.firstCall.args[0]).to.not.be(url); - expect($location.url.firstCall.args[0]).to.be(testUrl); - }); - - it('should handle dot notation', function () { - const url = '/some/thing/{{that.is.substituted}}'; - - kbnUrl.change(url, { - that: { - is: { - substituted: 
'test', - }, - }, - }); - - expect($location.url()).to.be('/some/thing/test'); - }); - - it('should throw when params are missing', function () { - const url = '/{{replace_me}}'; - const params = {}; - - try { - kbnUrl.change(url, params); - throw new Error('this should not run'); - } catch (err) { - expect(err).to.be.an(Error); - expect(err.message).to.match(/replace_me/); - } - }); - - it('should throw when filtered params are missing', function () { - const url = '/{{replace_me|number}}'; - const params = {}; - - try { - kbnUrl.change(url, params); - throw new Error('this should not run'); - } catch (err) { - expect(err).to.be.an(Error); - expect(err.message).to.match(/replace_me\|number/); - } - }); - - it('should change the entire url', function () { - const path = '/test/path'; - const search = { search: 'test' }; - const hash = 'hash'; - const newPath = '/new/location'; - - $location.path(path).search(search).hash(hash); - - // verify the starting state - expect($location.path()).to.be(path); - expect($location.search()).to.eql(search); - expect($location.hash()).to.be(hash); - - kbnUrl.change(newPath); - - // verify the ending state - expect($location.path()).to.be(newPath); - expect($location.search()).to.eql({}); - expect($location.hash()).to.be(''); - }); - - it('should allow setting app state on the target url', function () { - const path = '/test/path'; - const search = { search: 'test' }; - const hash = 'hash'; - const newPath = '/new/location'; - - $location.path(path).search(search).hash(hash); - - // verify the starting state - expect($location.path()).to.be(path); - expect($location.search()).to.eql(search); - expect($location.hash()).to.be(hash); - - kbnUrl.change(newPath, null, new StubAppState()); - - // verify the ending state - expect($location.path()).to.be(newPath); - expect($location.search()).to.eql({ _a: 'stateQueryParam' }); - expect($location.hash()).to.be(''); - }); - }); - - describe('changePath', function () { - it('should change just the path', function () { - const path = '/test/path'; - const search = { search: 'test' }; - const hash = 'hash'; - const newPath = '/new/location'; - - $location.path(path).search(search).hash(hash); - - // verify the starting state - expect($location.path()).to.be(path); - expect($location.search()).to.eql(search); - expect($location.hash()).to.be(hash); - - kbnUrl.changePath(newPath); - - // verify the ending state - expect($location.path()).to.be(newPath); - expect($location.search()).to.eql(search); - expect($location.hash()).to.be(hash); - }); - }); - - describe('redirect', function () { - it('should change the entire url', function () { - const path = '/test/path'; - const search = { search: 'test' }; - const hash = 'hash'; - const newPath = '/new/location'; - - $location.path(path).search(search).hash(hash); - - // verify the starting state - expect($location.path()).to.be(path); - expect($location.search()).to.eql(search); - expect($location.hash()).to.be(hash); - - kbnUrl.redirect(newPath); - - // verify the ending state - expect($location.path()).to.be(newPath); - expect($location.search()).to.eql({}); - expect($location.hash()).to.be(''); - }); - - it('should allow setting app state on the target url', function () { - const path = '/test/path'; - const search = { search: 'test' }; - const hash = 'hash'; - const newPath = '/new/location'; - - $location.path(path).search(search).hash(hash); - - // verify the starting state - expect($location.path()).to.be(path); - expect($location.search()).to.eql(search); - 
expect($location.hash()).to.be(hash); - - kbnUrl.redirect(newPath, null, new StubAppState()); - - // verify the ending state - expect($location.path()).to.be(newPath); - expect($location.search()).to.eql({ _a: 'stateQueryParam' }); - expect($location.hash()).to.be(''); - }); - - it('should replace the current history entry', function () { - sinon.stub($location, 'replace'); - $location.url('/some/path'); - - expect($location.replace.callCount).to.be(0); - kbnUrl.redirect('/new/path/'); - expect($location.replace.callCount).to.be(1); - }); - - it('should call replace on $location', function () { - sinon.stub(kbnUrl, '_shouldForceReload').returns(false); - sinon.stub($location, 'replace'); - - expect($location.replace.callCount).to.be(0); - kbnUrl.redirect('/poop'); - expect($location.replace.callCount).to.be(1); - }); - }); - - describe('redirectPath', function () { - it('should only change the path', function () { - const path = '/test/path'; - const search = { search: 'test' }; - const hash = 'hash'; - const newPath = '/new/location'; - - $location.path(path).search(search).hash(hash); - - // verify the starting state - expect($location.path()).to.be(path); - expect($location.search()).to.eql(search); - expect($location.hash()).to.be(hash); - - kbnUrl.redirectPath(newPath); - - // verify the ending state - expect($location.path()).to.be(newPath); - expect($location.search()).to.eql(search); - expect($location.hash()).to.be(hash); - }); - - it('should call replace on $location', function () { - sinon.stub(kbnUrl, '_shouldForceReload').returns(false); - sinon.stub($location, 'replace'); - - expect($location.replace.callCount).to.be(0); - kbnUrl.redirectPath('/poop'); - expect($location.replace.callCount).to.be(1); - }); - }); - - describe('_shouldForceReload', function () { - let next; - let prev; - - beforeEach(function () { - $route.current = { - $$route: { - regexp: /^\/is-current-route\/(\d+)/, - reloadOnSearch: true, - }, - }; - - prev = { path: '/is-current-route/1', search: {} }; - next = { path: '/is-current-route/1', search: {} }; - }); - - it("returns false if the passed url doesn't match the current route", function () { - next.path = '/not current'; - expect(kbnUrl._shouldForceReload(next, prev, $route)).to.be(false); - }); - - describe('if the passed url does match the route', function () { - describe('and the route reloads on search', function () { - describe('and the path is the same', function () { - describe('and the search params are the same', function () { - it('returns true', function () { - expect(kbnUrl._shouldForceReload(next, prev, $route)).to.be(true); - }); - }); - describe('but the search params are different', function () { - it('returns false', function () { - next.search = {}; - prev.search = { q: 'search term' }; - expect(kbnUrl._shouldForceReload(next, prev, $route)).to.be(false); - }); - }); - }); - - describe('and the path is different', function () { - beforeEach(function () { - next.path = '/not-same'; - }); - - describe('and the search params are the same', function () { - it('returns false', function () { - expect(kbnUrl._shouldForceReload(next, prev, $route)).to.be(false); - }); - }); - describe('but the search params are different', function () { - it('returns false', function () { - next.search = {}; - prev.search = { q: 'search term' }; - expect(kbnUrl._shouldForceReload(next, prev, $route)).to.be(false); - }); - }); - }); - }); - - describe('but the route does not reload on search', function () { - beforeEach(function () { - 
$route.current.$$route.reloadOnSearch = false; - }); - - describe('and the path is the same', function () { - describe('and the search params are the same', function () { - it('returns true', function () { - expect(kbnUrl._shouldForceReload(next, prev, $route)).to.be(true); - }); - }); - describe('but the search params are different', function () { - it('returns true', function () { - next.search = {}; - prev.search = { q: 'search term' }; - expect(kbnUrl._shouldForceReload(next, prev, $route)).to.be(true); - }); - }); - }); - - describe('and the path is different', function () { - beforeEach(function () { - next.path = '/not-same'; - }); - - describe('and the search params are the same', function () { - it('returns false', function () { - expect(kbnUrl._shouldForceReload(next, prev, $route)).to.be(false); - }); - }); - describe('but the search params are different', function () { - it('returns false', function () { - next.search = {}; - prev.search = { q: 'search term' }; - expect(kbnUrl._shouldForceReload(next, prev, $route)).to.be(false); - }); - }); - }); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/url/absolute_to_parsed_url.ts b/src/legacy/ui/public/url/absolute_to_parsed_url.ts deleted file mode 100644 index 30f493c25776c..0000000000000 --- a/src/legacy/ui/public/url/absolute_to_parsed_url.ts +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { parse } from 'url'; - -import { extractAppPathAndId } from './extract_app_path_and_id'; -import { KibanaParsedUrl } from './kibana_parsed_url'; - -/** - * - * @param absoluteUrl - an absolute url, e.g. https://localhost:5601/gra/app/visualize#/edit/viz_id?hi=bye - * @param basePath - An optional base path for kibana. If supplied, should start with a "/". - * e.g. in https://localhost:5601/gra/app/visualize#/edit/viz_id the basePath is - * "/gra". - * @return {KibanaParsedUrl} - */ -export function absoluteToParsedUrl(absoluteUrl: string, basePath = '') { - const { appPath, appId } = extractAppPathAndId(absoluteUrl, basePath); - const { hostname, port, protocol } = parse(absoluteUrl); - return new KibanaParsedUrl({ - basePath, - appId: appId!, - appPath, - hostname, - port, - protocol, - }); -} diff --git a/src/legacy/ui/public/url/extract_app_path_and_id.ts b/src/legacy/ui/public/url/extract_app_path_and_id.ts deleted file mode 100644 index 44bba272e0873..0000000000000 --- a/src/legacy/ui/public/url/extract_app_path_and_id.ts +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { parse } from 'url'; - -/** - * If the url is determined to contain an appId and appPath, it returns those portions. If it is not in the right - * format and an appId and appPath can't be extracted, it returns an empty object. - * @param {string} url - a relative or absolute url which contains an appPath, an appId, and optionally, a basePath. - * @param {string} basePath - optional base path, if given should start with "/". - */ -export function extractAppPathAndId(url: string, basePath = '') { - const parsedUrl = parse(url); - if (!parsedUrl.path) { - return {}; - } - const pathWithoutBase = parsedUrl.path.slice(basePath.length); - - if (!pathWithoutBase.startsWith('/app/')) { - return {}; - } - - const appPath = parsedUrl.hash && parsedUrl.hash.length > 0 ? parsedUrl.hash.slice(1) : ''; - return { appId: pathWithoutBase.slice('/app/'.length), appPath }; -} diff --git a/src/legacy/ui/public/url/index.js b/src/legacy/ui/public/url/index.js deleted file mode 100644 index 8ef267de2890c..0000000000000 --- a/src/legacy/ui/public/url/index.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { KbnUrlProvider } from './url'; -export { RedirectWhenMissingProvider } from './redirect_when_missing'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -export { modifyUrl } from '../../../../core/utils'; diff --git a/src/legacy/ui/public/url/kbn_url.ts b/src/legacy/ui/public/url/kbn_url.ts deleted file mode 100644 index 42b6a8f19f9a9..0000000000000 --- a/src/legacy/ui/public/url/kbn_url.ts +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export interface KbnUrl { - change: (url: string) => void; - removeParam: (param: string) => void; -} diff --git a/src/legacy/ui/public/url/kibana_parsed_url.ts b/src/legacy/ui/public/url/kibana_parsed_url.ts deleted file mode 100644 index 1c60e8729e0ff..0000000000000 --- a/src/legacy/ui/public/url/kibana_parsed_url.ts +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { parse } from 'url'; - -import { modifyUrl } from '../../../../core/public'; -import { prependPath } from './prepend_path'; - -interface Options { - /** - * An optional base path for kibana. If supplied, should start with a "/". - * e.g. in https://localhost:5601/gra/app/visualize#/edit/viz_id the - * basePath is "/gra" - */ - basePath?: string; - - /** - * The app id. - * e.g. in https://localhost:5601/gra/app/visualize#/edit/viz_id the app id is "kibana". - */ - appId: string; - - /** - * The path for a page in the the app. Should start with a "/". Don't include the hash sign. Can - * include all query parameters. - * e.g. in https://localhost:5601/gra/app/visualize#/edit/viz_id?g=state the appPath is - * "/edit/viz_id?g=state" - */ - appPath?: string; - - /** - * Optional hostname. Uses current window location's hostname if hostname, port, - * and protocol are undefined. - */ - hostname?: string; - - /** - * Optional port. Uses current window location's port if hostname, port, - * and protocol are undefined. - */ - port?: string; - - /** - * Optional protocol. Uses current window location's protocol if hostname, port, - * and protocol are undefined. - */ - protocol?: string; -} - -/** - * Represents the pieces that make up a url in Kibana, offering some helpful functionality - * for translating those pieces into absolute or relative urls. A Kibana url with a basePath - * looks like this: http://localhost:5601/basePath/app/appId#/an/appPath?with=query&params - * - * - basePath is "/basePath" - * - appId is "appId" - * - appPath is "/an/appPath?with=query&params" - * - * Almost all urls in Kibana should have this structure, including the "/app" portion in front of the appId - * (one exception is the login link).
- */ -export class KibanaParsedUrl { - public appId: string; - public appPath: string; - public basePath: string; - public hostname?: string; - public protocol?: string; - public port?: string; - - constructor(options: Options) { - const { appId, basePath = '', appPath = '', hostname, protocol, port } = options; - - // We'll use window defaults - const hostOrProtocolSpecified = hostname || protocol || port; - - this.basePath = basePath; - this.appId = appId; - this.appPath = appPath; - this.hostname = hostOrProtocolSpecified ? hostname : window.location.hostname; - this.port = hostOrProtocolSpecified ? port : window.location.port; - this.protocol = hostOrProtocolSpecified ? protocol : window.location.protocol; - } - - public getGlobalState() { - if (!this.appPath) { - return ''; - } - const parsedUrl = parse(this.appPath, true); - const query = parsedUrl.query || {}; - return query._g || ''; - } - - public setGlobalState(newGlobalState: string | string[]) { - if (!this.appPath) { - return; - } - - this.appPath = modifyUrl(this.appPath, (parsed) => { - parsed.query._g = newGlobalState; - }); - } - - public addQueryParameter(name: string, val: string) { - this.appPath = modifyUrl(this.appPath, (parsed) => { - parsed.query[name] = val; - }); - } - - public getHashedAppPath() { - return `#${this.appPath}`; - } - - public getAppBasePath() { - return `/${this.appId}`; - } - - public getAppRootPath() { - return `/app${this.getAppBasePath()}${this.getHashedAppPath()}`; - } - - public getRootRelativePath() { - return prependPath(this.getAppRootPath(), this.basePath); - } - - public getAbsoluteUrl() { - return modifyUrl(this.getRootRelativePath(), (parsed) => { - parsed.protocol = this.protocol; - parsed.port = this.port; - parsed.hostname = this.hostname; - }); - } -} diff --git a/src/legacy/ui/public/url/prepend_path.ts b/src/legacy/ui/public/url/prepend_path.ts deleted file mode 100644 index b8a77d5c23bee..0000000000000 --- a/src/legacy/ui/public/url/prepend_path.ts +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { isString } from 'lodash'; -import { format, parse } from 'url'; - -/** - * - * @param {string} relativePath - a relative path that must start with a "/". - * @param {string} newPath - the new path to prefix. ex: 'xyz' - * @return {string} the url with the basePath prepended. ex. '/xyz/app/kibana#/management'. If - * the relative path isn't in the right format (e.g. doesn't start with a "/") the relativePath is returned - * unchanged. 
- */ -export function prependPath(relativePath: string, newPath = '') { - if (!relativePath || !isString(relativePath)) { - return relativePath; - } - - const parsed = parse(relativePath, true, true); - if (!parsed.host && parsed.pathname) { - if (parsed.pathname[0] === '/') { - parsed.pathname = newPath + parsed.pathname; - } - } - - return format({ - protocol: parsed.protocol, - host: parsed.host, - pathname: parsed.pathname, - query: parsed.query, - hash: parsed.hash, - }); -} diff --git a/src/legacy/ui/public/url/redirect_when_missing.js b/src/legacy/ui/public/url/redirect_when_missing.js deleted file mode 100644 index 85c90a14d9fd7..0000000000000 --- a/src/legacy/ui/public/url/redirect_when_missing.js +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import React from 'react'; -import { i18n } from '@kbn/i18n'; -import { MarkdownSimple } from '../../../../plugins/kibana_react/public'; -import { toastNotifications } from 'ui/notify'; -import { SavedObjectNotFound } from '../../../../plugins/kibana_utils/public'; -import { uiModules } from '../modules'; - -uiModules.get('kibana/url').service('redirectWhenMissing', function (Private) { - return Private(RedirectWhenMissingProvider); -}); - -export function RedirectWhenMissingProvider(kbnUrl, Promise) { - /** - * Creates an error handler that will redirect to a url when a SavedObjectNotFound - * error is thrown - * - * @param {string|object} mapping - a mapping of url's to redirect to based on the saved object that - * couldn't be found, or just a string that will be used for all types - * @return {function} - the handler to pass to .catch() - */ - return function (mapping) { - if (typeof mapping === 'string') { - mapping = { '*': mapping }; - } - - return function (error) { - // if this error is not "404", rethrow - const savedObjectNotFound = error instanceof SavedObjectNotFound; - const unknownVisType = error.message.indexOf('Invalid type') === 0; - if (unknownVisType) { - error.savedObjectType = 'visualization'; - } else if (!savedObjectNotFound) { - throw error; - } - - let url = mapping[error.savedObjectType] || mapping['*']; - if (!url) url = '/'; - - url += (url.indexOf('?') >= 0 ? 
'&' : '?') + `notFound=${error.savedObjectType}`; - - toastNotifications.addWarning({ - title: i18n.translate('common.ui.url.savedObjectIsMissingNotificationMessage', { - defaultMessage: 'Saved object is missing', - }), - text: <MarkdownSimple>{error.message}</MarkdownSimple>, - }); - - kbnUrl.redirect(url); - return Promise.halt(); - }; - }; -} diff --git a/src/legacy/ui/public/url/relative_to_absolute.ts b/src/legacy/ui/public/url/relative_to_absolute.ts deleted file mode 100644 index 7d0737d145a1b..0000000000000 --- a/src/legacy/ui/public/url/relative_to_absolute.ts +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * - * @param {string} url - a relative or root relative url. If a relative path is given then the - * absolute url returned will depend on the current page where this function is called from. For example - * if you are on page "http://www.mysite.com/shopping/kids" and you pass this function "adults", you would get - * back "http://www.mysite.com/shopping/adults". If you passed this function a root relative path, or one that - * starts with a "/", for example "/account/cart", you would get back "http://www.mysite.com/account/cart". - * @return {string} the relative url transformed into an absolute url - */ -export function relativeToAbsolute(url: string) { - // convert all link urls to absolute urls - const a = document.createElement('a'); - a.setAttribute('href', url); - return a.href; -} diff --git a/src/legacy/ui/public/url/url.js b/src/legacy/ui/public/url/url.js deleted file mode 100644 index fb243b02e05c7..0000000000000 --- a/src/legacy/ui/public/url/url.js +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -import _ from 'lodash'; -import { i18n } from '@kbn/i18n'; -import { uiModules } from '../modules'; -import { AppStateProvider } from '../state_management/app_state'; - -uiModules.get('kibana/url').service('kbnUrl', function (Private, $injector) { - //config is not directly used but registers global event listeners to kbnUrl to function - $injector.get('config'); - return Private(KbnUrlProvider); -}); - -export function KbnUrlProvider($injector, $location, $rootScope, $parse, Private) { - /** - * the `kbnUrl` service was created to smooth over some of the - * inconsistent behavior that occurs when modifying the url via - * the `$location` api. In general it is recommended that you use - * the `kbnUrl` service any time you want to modify the url. - * - * "features" that `kbnUrl` does it's best to guarantee, which - * are not guaranteed with the `$location` service: - * - calling `kbnUrl.change()` with a url that resolves to the current - * route will force a full transition (rather than just updating the - * properties of the $route object) - * - * Additional features of `kbnUrl` - * - parameterized urls - * - easily include an app state with the url - * - * @type {KbnUrl} - */ - const self = this; - - /** - * Navigate to a url - * - * @param {String} url - the new url, can be a template. See #eval - * @param {Object} [paramObj] - optional set of parameters for the url template - * @return {undefined} - */ - self.change = function (url, paramObj, appState) { - self._changeLocation('url', url, paramObj, false, appState); - }; - - /** - * Same as #change except only changes the url's path, - * leaving the search string and such intact - * - * @param {String} path - the new path, can be a template. See #eval - * @param {Object} [paramObj] - optional set of parameters for the path template - * @return {undefined} - */ - self.changePath = function (path, paramObj) { - self._changeLocation('path', path, paramObj); - }; - - /** - * Same as #change except that it removes the current url from history - * - * @param {String} url - the new url, can be a template. See #eval - * @param {Object} [paramObj] - optional set of parameters for the url template - * @return {undefined} - */ - self.redirect = function (url, paramObj, appState) { - self._changeLocation('url', url, paramObj, true, appState); - }; - - /** - * Same as #redirect except only changes the url's path, - * leaving the search string and such intact - * - * @param {String} path - the new path, can be a template. See #eval - * @param {Object} [paramObj] - optional set of parameters for the path template - * @return {undefined} - */ - self.redirectPath = function (path, paramObj) { - self._changeLocation('path', path, paramObj, true); - }; - - /** - * Evaluate a url template. templates can contain double-curly wrapped - * expressions that are evaluated in the context of the paramObj - * - * @param {String} template - the url template to evaluate - * @param {Object} [paramObj] - the variables to expose to the template - * @return {String} - the evaluated result - * @throws {Error} If any of the expressions can't be parsed. 
- */ - self.eval = function (template, paramObj) { - paramObj = paramObj || {}; - - return template.replace(/\{\{([^\}]+)\}\}/g, function (match, expr) { - // remove filters - const key = expr.split('|')[0].trim(); - - // verify that the expression can be evaluated - const p = $parse(key)(paramObj); - - // if evaluation can't be made, throw - if (_.isUndefined(p)) { - throw new Error( - i18n.translate('common.ui.url.replacementFailedErrorMessage', { - defaultMessage: 'Replacement failed, unresolved expression: {expr}', - values: { expr }, - }) - ); - } - - return encodeURIComponent($parse(expr)(paramObj)); - }); - }; - - /** - * convert an object's route to an href, compatible with - * window.location.href= and <a href=""> - * - * @param {Object} obj - any object that list's it's routes at obj.routes{} - * @param {string} route - the route name - * @return {string} - the computed href - */ - self.getRouteHref = function (obj, route) { - return '#' + self.getRouteUrl(obj, route); - }; - - /** - * convert an object's route to a url, compatible with url.change() or $location.url() - * - * @param {Object} obj - any object that list's it's routes at obj.routes{} - * @param {string} route - the route name - * @return {string} - the computed url - */ - self.getRouteUrl = function (obj, route) { - const template = obj && obj.routes && obj.routes[route]; - if (template) return self.eval(template, obj); - }; - - /** - * Similar to getRouteUrl, supports objects which list their routes, - * and redirects to the named route. See #redirect - * - * @param {Object} obj - any object that list's it's routes at obj.routes{} - * @param {string} route - the route name - * @return {undefined} - */ - self.redirectToRoute = function (obj, route) { - self.redirect(self.getRouteUrl(obj, route)); - }; - - /** - * Similar to getRouteUrl, supports objects which list their routes, - * and changes the url to the named route. See #change - * - * @param {Object} obj - any object that list's it's routes at obj.routes{} - * @param {string} route - the route name - * @return {undefined} - */ - self.changeToRoute = function (obj, route) { - self.change(self.getRouteUrl(obj, route)); - }; - - /** - * Removes the given parameter from the url. Does so without modifying the browser - * history.
- * @param param - */ - self.removeParam = function (param) { - $location.search(param, null).replace(); - }; - - ///// - // private api - ///// - let reloading; - - self._changeLocation = function (type, url, paramObj, replace, appState) { - const prev = { - path: $location.path(), - search: $location.search(), - }; - - url = self.eval(url, paramObj); - $location[type](url); - if (replace) $location.replace(); - - if (appState) { - $location.search(appState.getQueryParamName(), appState.toQueryParam()); - } - - const next = { - path: $location.path(), - search: $location.search(), - }; - - if ($injector.has('$route')) { - const $route = $injector.get('$route'); - - if (self._shouldForceReload(next, prev, $route)) { - const appState = Private(AppStateProvider).getAppState(); - if (appState) appState.destroy(); - - reloading = $rootScope.$on('$locationChangeSuccess', function () { - // call the "unlisten" function returned by $on - reloading(); - reloading = false; - - $route.reload(); - }); - } - } - }; - - // determine if the router will automatically reload the route - self._shouldForceReload = function (next, prev, $route) { - if (reloading) return false; - - const route = $route.current && $route.current.$$route; - if (!route) return false; - - // for the purposes of determining whether the router will - // automatically be reloading, '' and '/' are equal - const nextPath = next.path || '/'; - const prevPath = prev.path || '/'; - if (nextPath !== prevPath) return false; - - const reloadOnSearch = route.reloadOnSearch; - const searchSame = _.isEqual(next.search, prev.search); - return (reloadOnSearch && searchSame) || !reloadOnSearch; - }; -} diff --git a/src/legacy/ui/public/utils/collection.test.ts b/src/legacy/ui/public/utils/collection.test.ts deleted file mode 100644 index 0841e3554c0d0..0000000000000 --- a/src/legacy/ui/public/utils/collection.test.ts +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { move } from './collection'; - -describe('collection', () => { - describe('move', () => { - test('accepts previous from->to syntax', () => { - const list = [1, 1, 1, 1, 1, 1, 1, 1, 8, 1, 1]; - - expect(list[3]).toBe(1); - expect(list[8]).toBe(8); - - move(list, 8, 3); - - expect(list[8]).toBe(1); - expect(list[3]).toBe(8); - }); - - test('moves an object up based on a function callback', () => { - const list = [1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1]; - - expect(list[4]).toBe(0); - expect(list[5]).toBe(1); - expect(list[6]).toBe(0); - - move(list, 5, false, (v: any) => v === 0); - - expect(list[4]).toBe(1); - expect(list[5]).toBe(0); - expect(list[6]).toBe(0); - }); - - test('moves an object down based on a function callback', () => { - const list = [1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1]; - - expect(list[4]).toBe(0); - expect(list[5]).toBe(1); - expect(list[6]).toBe(0); - - move(list, 5, true, (v: any) => v === 0); - - expect(list[4]).toBe(0); - expect(list[5]).toBe(0); - expect(list[6]).toBe(1); - }); - - test('moves an object up based on a where callback', () => { - const list = [ - { v: 1 }, - { v: 1 }, - { v: 1 }, - { v: 1 }, - { v: 0 }, - { v: 1 }, - { v: 0 }, - { v: 1 }, - { v: 1 }, - { v: 1 }, - { v: 1 }, - ]; - - expect(list[4]).toHaveProperty('v', 0); - expect(list[5]).toHaveProperty('v', 1); - expect(list[6]).toHaveProperty('v', 0); - - move(list, 5, false, { v: 0 }); - - expect(list[4]).toHaveProperty('v', 1); - expect(list[5]).toHaveProperty('v', 0); - expect(list[6]).toHaveProperty('v', 0); - }); - - test('moves an object down based on a where callback', () => { - const list = [ - { v: 1 }, - { v: 1 }, - { v: 1 }, - { v: 1 }, - { v: 0 }, - { v: 1 }, - { v: 0 }, - { v: 1 }, - { v: 1 }, - { v: 1 }, - { v: 1 }, - ]; - - expect(list[4]).toHaveProperty('v', 0); - expect(list[5]).toHaveProperty('v', 1); - expect(list[6]).toHaveProperty('v', 0); - - move(list, 5, true, { v: 0 }); - - expect(list[4]).toHaveProperty('v', 0); - expect(list[5]).toHaveProperty('v', 0); - expect(list[6]).toHaveProperty('v', 1); - }); - - test('moves an object down based on a pluck callback', () => { - const list = [ - { id: 0, normal: true }, - { id: 1, normal: true }, - { id: 2, normal: true }, - { id: 3, normal: true }, - { id: 4, normal: true }, - { id: 5, normal: false }, - { id: 6, normal: true }, - { id: 7, normal: true }, - { id: 8, normal: true }, - { id: 9, normal: true }, - ]; - - expect(list[4]).toHaveProperty('id', 4); - expect(list[5]).toHaveProperty('id', 5); - expect(list[6]).toHaveProperty('id', 6); - - move(list, 5, true, 'normal'); - - expect(list[4]).toHaveProperty('id', 4); - expect(list[5]).toHaveProperty('id', 6); - expect(list[6]).toHaveProperty('id', 5); - }); - }); -}); diff --git a/src/legacy/ui/public/utils/collection.ts b/src/legacy/ui/public/utils/collection.ts deleted file mode 100644 index b882a2bbe6e5b..0000000000000 --- a/src/legacy/ui/public/utils/collection.ts +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; - -/** - * move an obj either up or down in the collection by - * injecting it either before/after the prev/next obj that - * satisfied the qualifier - * - * or, just from one index to another... - * - * @param {array} objs - the list to move the object within - * @param {number|any} obj - the object that should be moved, or the index that the object is currently at - * @param {number|boolean} below - the index to move the object to, or whether it should be moved up or down - * @param {function} qualifier - a lodash-y callback, object = _.where, string = _.pluck - * @return {array} - the objs argument - */ -export function move( - objs: any[], - obj: object | number, - below: number | boolean, - qualifier?: ((object: object, index: number) => any) | Record | string -): object[] { - const origI = _.isNumber(obj) ? obj : objs.indexOf(obj); - if (origI === -1) { - return objs; - } - - if (_.isNumber(below)) { - // move to a specific index - objs.splice(below, 0, objs.splice(origI, 1)[0]); - return objs; - } - - below = !!below; - qualifier = qualifier && _.iteratee(qualifier); - - const above = !below; - const finder = below ? _.findIndex : _.findLastIndex; - - // find the index of the next/previous obj that meets the qualifications - const targetI = finder(objs, (otherAgg, otherI) => { - if (below && otherI <= origI) { - return; - } - if (above && otherI >= origI) { - return; - } - return Boolean(_.isFunction(qualifier) && qualifier(otherAgg, otherI)); - }); - - if (targetI === -1) { - return objs; - } - - // place the obj at it's new index - objs.splice(targetI, 0, objs.splice(origI, 1)[0]); - return objs; -} diff --git a/src/legacy/ui/public/utils/legacy_class.js b/src/legacy/ui/public/utils/legacy_class.js deleted file mode 100644 index f47650a77bb6d..0000000000000 --- a/src/legacy/ui/public/utils/legacy_class.js +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -// create a property descriptor for properties -// that won't change -function describeConst(val) { - return { - writable: false, - enumerable: false, - configurable: false, - value: val, - }; -} - -const props = { - inherits: describeConst(function (SuperClass) { - const prototype = Object.create(SuperClass.prototype, { - constructor: describeConst(this), - superConstructor: describeConst(SuperClass), - }); - - Object.defineProperties(this, { - prototype: describeConst(prototype), - Super: describeConst(SuperClass), - }); - - return this; - }), -}; - -/** - * Add class-related behavior to a function, currently this - * only attaches an .inherits() method. - * - * @param {Constructor} ClassConstructor - The function that should be extended - * @return {Constructor} - the constructor passed in; - */ -export function createLegacyClass(ClassConstructor) { - return Object.defineProperties(ClassConstructor, props); -} diff --git a/src/legacy/ui/ui_apps/__snapshots__/ui_apps_mixin.test.js.snap b/src/legacy/ui/ui_apps/__snapshots__/ui_apps_mixin.test.js.snap deleted file mode 100644 index cb2b6cda26a81..0000000000000 --- a/src/legacy/ui/ui_apps/__snapshots__/ui_apps_mixin.test.js.snap +++ /dev/null @@ -1,92 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`UiAppsMixin creates kbnServer.uiApps from uiExports 1`] = ` -Array [ - StubUiApp { - "_hidden": true, - "_id": "foo", - }, - StubUiApp { - "_hidden": false, - "_id": "bar", - }, -] -`; - -exports[`UiAppsMixin decorates server with methods 1`] = ` -Array [ - Array [ - "server", - "getAllUiApps", - [Function], - ], - Array [ - "server", - "getUiAppById", - [Function], - ], - Array [ - "server", - "getHiddenUiAppById", - [Function], - ], - Array [ - "server", - "injectUiAppVars", - [Function], - ], - Array [ - "server", - "getInjectedUiAppVars", - [Function], - ], -] -`; - -exports[`UiAppsMixin server.getAllUiApps() returns hidden and non-hidden apps 1`] = ` -Array [ - StubUiApp { - "_hidden": true, - "_id": "foo", - }, - StubUiApp { - "_hidden": false, - "_id": "bar", - }, -] -`; - -exports[`UiAppsMixin server.getHiddenUiAppById() returns hidden apps when requested, undefined for non-hidden and unknown apps 1`] = ` -StubUiApp { - "_hidden": true, - "_id": "foo", -} -`; - -exports[`UiAppsMixin server.getUiAppById() returns non-hidden apps when requested, undefined for non-hidden and unknown apps 1`] = ` -StubUiApp { - "_hidden": false, - "_id": "bar", -} -`; - -exports[`UiAppsMixin server.injectUiAppVars()/server.getInjectedUiAppVars() merges injected vars provided by multiple providers in the order they are registered: foo 1`] = ` -Object { - "bar": false, - "baz": 1, - "box": true, - "foo": true, -} -`; - -exports[`UiAppsMixin server.injectUiAppVars()/server.getInjectedUiAppVars() stored injectVars provider and returns provider result when requested: bar 1`] = ` -Object { - "thisIsFoo": false, -} -`; - -exports[`UiAppsMixin server.injectUiAppVars()/server.getInjectedUiAppVars() stored injectVars provider and returns provider result when requested: foo 1`] = ` -Object { - "thisIsFoo": true, -} -`; diff --git a/src/legacy/ui/ui_apps/__tests__/ui_app.js b/src/legacy/ui/ui_apps/__tests__/ui_app.js deleted file mode 100644 index bb4bcfe2d7443..0000000000000 --- a/src/legacy/ui/ui_apps/__tests__/ui_app.js +++ /dev/null @@ -1,306 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. 
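A quick sketch of how the removed `createLegacyClass` helper was used, with hypothetical constructors and assuming the deleted module were still importable; the expectations in the comments follow directly from the `inherits` implementation shown above:

    import { createLegacyClass } from './legacy_class';

    function Animal() {}
    function Dog() {}

    createLegacyClass(Dog).inherits(Animal);

    new Dog() instanceof Animal;                // true: Dog.prototype now derives from Animal.prototype
    Dog.Super === Animal;                       // true
    Dog.prototype.constructor === Dog;          // true
    Dog.prototype.superConstructor === Animal;  // true

Modern code gets the same wiring from `class Dog extends Animal`, which is presumably why the helper can be dropped here.
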
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; - -import { UiApp } from '../ui_app'; -import { UiNavLink } from '../../ui_nav_links'; - -function createStubUiAppSpec(extraParams) { - return { - id: 'uiapp-test', - main: 'main.js', - title: 'UIApp Test', - order: 9000, - icon: 'ui_app_test.svg', - linkToLastSubUrl: true, - hidden: false, - listed: false, - ...extraParams, - }; -} - -function createStubKbnServer(extraParams) { - return { - plugins: [], - config: { - get: sinon.stub().withArgs('server.basePath').returns(''), - }, - server: {}, - ...extraParams, - }; -} - -function createUiApp(spec = createStubUiAppSpec(), kbnServer = createStubKbnServer()) { - return new UiApp(kbnServer, spec); -} - -describe('ui apps / UiApp', () => { - describe('constructor', () => { - it('throws an exception if an ID is not given', () => { - const spec = {}; // should have id property - expect(() => createUiApp(spec)).to.throwException(); - }); - - describe('defaults', () => { - const spec = { id: 'uiapp-test-defaults' }; - const app = createUiApp(spec); - - it('has the ID from the spec', () => { - expect(app.getId()).to.be(spec.id); - }); - - it('has no plugin ID', () => { - expect(app.getPluginId()).to.be(undefined); - }); - - it('is not hidden', () => { - expect(app.isHidden()).to.be(false); - }); - - it('is listed', () => { - expect(app.isListed()).to.be(true); - }); - - it('has a navLink', () => { - expect(app.getNavLink()).to.be.a(UiNavLink); - }); - - it('has no main module', () => { - expect(app.getMainModuleId()).to.be(undefined); - }); - - it('has a mostly empty JSON representation', () => { - expect(JSON.parse(JSON.stringify(app))).to.eql({ - id: spec.id, - navLink: { - id: 'uiapp-test-defaults', - order: 0, - url: '/app/uiapp-test-defaults', - subUrlBase: '/app/uiapp-test-defaults', - linkToLastSubUrl: true, - hidden: false, - disabled: false, - tooltip: '', - }, - }); - }); - }); - - describe('mock spec', () => { - const spec = createStubUiAppSpec(); - const app = createUiApp(spec); - - it('has the ID from the spec', () => { - expect(app.getId()).to.be(spec.id); - }); - - it('has no plugin ID', () => { - expect(app.getPluginId()).to.be(undefined); - }); - - it('is not hidden', () => { - expect(app.isHidden()).to.be(false); - }); - - it('is also not listed', () => { - expect(app.isListed()).to.be(false); - }); - - it('has no navLink', () => { - expect(app.getNavLink()).to.be(undefined); - }); - - it('has a main module', () => { - expect(app.getMainModuleId()).to.be('main.js'); - }); - - it('has spec values in JSON representation', () => { - expect(JSON.parse(JSON.stringify(app))).to.eql({ - id: spec.id, - title: spec.title, - icon: spec.icon, - main: spec.main, - linkToLastSubUrl: spec.linkToLastSubUrl, - navLink: { - id: 'uiapp-test', - title: 'UIApp Test', - order: 
9000, - url: '/app/uiapp-test', - subUrlBase: '/app/uiapp-test', - icon: 'ui_app_test.svg', - linkToLastSubUrl: true, - hidden: false, - disabled: false, - tooltip: '', - }, - }); - }); - }); - - /* - * The "hidden" and "listed" flags have an bound relationship. The "hidden" - * flag gets cast to a boolean value, and the "listed" flag is dependent on - * "hidden" - */ - describe('hidden flag', () => { - describe('is cast to boolean value', () => { - it('when undefined', () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isHidden()).to.be(false); - }); - - it('when null', () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - hidden: null, - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isHidden()).to.be(false); - }); - - it('when 0', () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - hidden: 0, - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isHidden()).to.be(false); - }); - - it('when true', () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - hidden: true, - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isHidden()).to.be(true); - }); - - it('when 1', () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - hidden: 1, - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isHidden()).to.be(true); - }); - }); - }); - - describe('listed flag', () => { - describe('defaults to the opposite value of hidden', () => { - it(`when it's null and hidden is true`, () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - hidden: true, - listed: null, - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isListed()).to.be(false); - }); - - it(`when it's null and hidden is false`, () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - hidden: false, - listed: null, - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isListed()).to.be(true); - }); - - it(`when it's undefined and hidden is false`, () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - hidden: false, - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isListed()).to.be(true); - }); - - it(`when it's undefined and hidden is true`, () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - hidden: true, - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isListed()).to.be(false); - }); - }); - - it(`is set to true when it's passed as true`, () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - listed: true, - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isListed()).to.be(true); - }); - - it(`is set to false when it's passed as false`, () => { - const kbnServer = createStubKbnServer(); - const spec = { - id: 'uiapp-test', - listed: false, - }; - const newApp = new UiApp(kbnServer, spec); - expect(newApp.isListed()).to.be(false); - }); - }); - }); - - describe('pluginId', () => { - describe('does not match a kbnServer plugin', () => { - it('throws an error at instantiation', () => { - expect(() => { - createUiApp(createStubUiAppSpec({ pluginId: 'foo' })); - }).to.throwException((error) => { - expect(error.message).to.match(/Unknown plugin id/); - }); - }); - }); - }); - - 
describe('#getMainModuleId', () => { - it('returns undefined by default', () => { - const app = createUiApp({ id: 'foo' }); - expect(app.getMainModuleId()).to.be(undefined); - }); - - it('returns main module id', () => { - const app = createUiApp({ id: 'foo', main: 'bar' }); - expect(app.getMainModuleId()).to.be('bar'); - }); - }); -}); diff --git a/src/legacy/ui/ui_apps/index.js b/src/legacy/ui/ui_apps/index.js deleted file mode 100644 index d64848b2c1a92..0000000000000 --- a/src/legacy/ui/ui_apps/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { uiAppsMixin } from './ui_apps_mixin'; diff --git a/src/legacy/ui/ui_apps/ui_app.js b/src/legacy/ui/ui_apps/ui_app.js deleted file mode 100644 index 7da9e39394deb..0000000000000 --- a/src/legacy/ui/ui_apps/ui_app.js +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { UiNavLink } from '../ui_nav_links'; - -export class UiApp { - constructor(kbnServer, spec) { - const { - pluginId, - id = pluginId, - main, - title, - order = 0, - icon, - euiIconType, - hidden, - linkToLastSubUrl, - disableSubUrlTracking, - listed, - category, - url = `/app/${id}`, - } = spec; - - if (!id) { - throw new Error('Every app must specify an id'); - } - - this._id = id; - this._main = main; - this._title = title; - this._order = order; - this._icon = icon; - this._euiIconType = euiIconType; - this._linkToLastSubUrl = linkToLastSubUrl; - this._disableSubUrlTracking = disableSubUrlTracking; - this._category = category; - this._hidden = hidden; - this._listed = listed; - this._url = url; - this._pluginId = pluginId; - this._kbnServer = kbnServer; - - if (this._pluginId && !this._getPlugin()) { - throw new Error(`Unknown plugin id "${this._pluginId}"`); - } - - if (!this.isHidden()) { - // unless an app is hidden it gets a navlink, but we only respond to `getNavLink()` - // if the app is also listed. 
This means that all apps in the kibanaPayload will - // have a navLink property since that list includes all normally accessible apps - this._navLink = new UiNavLink({ - id: this._id, - title: this._title, - order: this._order, - icon: this._icon, - euiIconType: this._euiIconType, - url: this._url, - linkToLastSubUrl: this._linkToLastSubUrl, - disableSubUrlTracking: this._disableSubUrlTracking, - category: this._category, - }); - } - } - - getId() { - return this._id; - } - - getPluginId() { - const plugin = this._getPlugin(); - return plugin ? plugin.id : undefined; - } - - isHidden() { - return !!this._hidden; - } - - isListed() { - return !this.isHidden() && (this._listed == null || !!this._listed); - } - - getNavLink() { - if (this.isListed()) { - return this._navLink; - } - } - - getMainModuleId() { - return this._main; - } - - _getPlugin() { - const pluginId = this._pluginId; - const { plugins } = this._kbnServer; - - return pluginId ? plugins.find((plugin) => plugin.id === pluginId) : undefined; - } - - toJSON() { - return { - id: this._id, - title: this._title, - icon: this._icon, - euiIconType: this._euiIconType, - main: this._main, - navLink: this._navLink, - linkToLastSubUrl: this._linkToLastSubUrl, - disableSubUrlTracking: this._disableSubUrlTracking, - category: this._category, - }; - } -} diff --git a/src/legacy/ui/ui_apps/ui_apps_mixin.js b/src/legacy/ui/ui_apps/ui_apps_mixin.js deleted file mode 100644 index c80b12a46bee3..0000000000000 --- a/src/legacy/ui/ui_apps/ui_apps_mixin.js +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
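The hidden/listed/navLink interplay in the `UiApp` class above is easy to misread, so here is a minimal standalone sketch of just that decision logic (not the actual class, and ignoring navLink construction details):

    // mirrors UiApp#isHidden and UiApp#isListed from the deleted class above
    const isHidden = (spec) => Boolean(spec.hidden);
    const isListed = (spec) => !isHidden(spec) && (spec.listed == null || Boolean(spec.listed));

    isListed({ hidden: true });                  // false
    isListed({ hidden: false });                 // true: listed defaults to the opposite of hidden
    isListed({ hidden: false, listed: false });  // false

    // a navLink is constructed for every non-hidden app, but getNavLink()
    // only returns it when isListed() is true
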
- */ - -import { UiApp } from './ui_app'; - -/** - * @typedef {import('../../server/kbn_server').default} KbnServer - */ - -/** - * - * @param {KbnServer} kbnServer - * @param {KbnServer['server']} server - */ -export function uiAppsMixin(kbnServer, server) { - const { uiAppSpecs = [] } = kbnServer.uiExports; - const existingIds = new Set(); - const appsById = new Map(); - const hiddenAppsById = new Map(); - - kbnServer.uiApps = uiAppSpecs.map((spec) => { - const app = new UiApp(kbnServer, spec); - const id = app.getId(); - - if (!existingIds.has(id)) { - existingIds.add(id); - } else { - throw new Error(`Unable to create two apps with the id ${id}.`); - } - - if (app.isHidden()) { - hiddenAppsById.set(id, app); - } else { - appsById.set(id, app); - } - - return app; - }); - - server.decorate('server', 'getAllUiApps', () => kbnServer.uiApps.slice(0)); - server.decorate('server', 'getUiAppById', (id) => appsById.get(id)); - server.decorate('server', 'getHiddenUiAppById', (id) => hiddenAppsById.get(id)); - server.decorate('server', 'injectUiAppVars', (appId, provider) => - kbnServer.newPlatform.__internals.legacy.injectUiAppVars(appId, provider) - ); - server.decorate( - 'server', - 'getInjectedUiAppVars', - async (appId) => await kbnServer.newPlatform.__internals.legacy.getInjectedUiAppVars(appId) - ); -} diff --git a/src/legacy/ui/ui_apps/ui_apps_mixin.test.js b/src/legacy/ui/ui_apps/ui_apps_mixin.test.js deleted file mode 100644 index 048358edfc10b..0000000000000 --- a/src/legacy/ui/ui_apps/ui_apps_mixin.test.js +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { LegacyInternals } from '../../../core/server'; - -import { uiAppsMixin } from './ui_apps_mixin'; - -jest.mock('./ui_app', () => ({ - UiApp: class StubUiApp { - constructor(kbnServer, spec) { - this._id = spec.id; - this._hidden = !!spec.hidden; - } - getId() { - return this._id; - } - isHidden() { - return this._hidden; - } - }, -})); - -describe('UiAppsMixin', () => { - let kbnServer; - let server; - let uiExports; - - beforeEach(() => { - uiExports = { - uiAppSpecs: [ - { - id: 'foo', - hidden: true, - }, - { - id: 'bar', - hidden: false, - }, - ], - }; - server = { - decorate: jest.fn((type, name, value) => { - if (type !== 'server') { - return; - } - - server[name] = value; - }), - }; - kbnServer = { - uiExports, - newPlatform: { - __internals: { - legacy: new LegacyInternals(uiExports, {}, server), - }, - }, - }; - - uiAppsMixin(kbnServer, server); - }); - - it('creates kbnServer.uiApps from uiExports', () => { - expect(kbnServer.uiApps).toMatchSnapshot(); - }); - - it('decorates server with methods', () => { - expect(server.decorate.mock.calls).toMatchSnapshot(); - }); - - describe('server.getAllUiApps()', () => { - it('returns hidden and non-hidden apps', () => { - expect(server.getAllUiApps()).toMatchSnapshot(); - }); - }); - - describe('server.getUiAppById()', () => { - it('returns non-hidden apps when requested, undefined for non-hidden and unknown apps', () => { - expect(server.getUiAppById('foo')).toBe(undefined); - expect(server.getUiAppById('bar')).toMatchSnapshot(); - expect(server.getUiAppById('baz')).toBe(undefined); - }); - }); - - describe('server.getHiddenUiAppById()', () => { - it('returns hidden apps when requested, undefined for non-hidden and unknown apps', () => { - expect(server.getHiddenUiAppById('foo')).toMatchSnapshot(); - expect(server.getHiddenUiAppById('bar')).toBe(undefined); - expect(server.getHiddenUiAppById('baz')).toBe(undefined); - }); - }); - - describe('server.injectUiAppVars()/server.getInjectedUiAppVars()', () => { - it('stored injectVars provider and returns provider result when requested', async () => { - server.injectUiAppVars('foo', () => ({ - thisIsFoo: true, - })); - - server.injectUiAppVars('bar', async () => ({ - thisIsFoo: false, - })); - - await expect(server.getInjectedUiAppVars('foo')).resolves.toMatchSnapshot('foo'); - await expect(server.getInjectedUiAppVars('bar')).resolves.toMatchSnapshot('bar'); - await expect(server.getInjectedUiAppVars('baz')).resolves.toEqual({}); - }); - - it('merges injected vars provided by multiple providers in the order they are registered', async () => { - server.injectUiAppVars('foo', () => ({ - foo: true, - bar: true, - baz: true, - })); - - server.injectUiAppVars('foo', async () => ({ - bar: false, - box: true, - })); - - server.injectUiAppVars('foo', async () => ({ - baz: 1, - })); - - await expect(server.getInjectedUiAppVars('foo')).resolves.toMatchSnapshot('foo'); - await expect(server.getInjectedUiAppVars('bar')).resolves.toEqual({}); - await expect(server.getInjectedUiAppVars('baz')).resolves.toEqual({}); - }); - }); -}); diff --git a/src/legacy/ui/ui_exports/README.md b/src/legacy/ui/ui_exports/README.md index ab81febe67993..7fb117b1c25b9 100644 --- a/src/legacy/ui/ui_exports/README.md +++ b/src/legacy/ui/ui_exports/README.md @@ -88,7 +88,6 @@ This reducer format was chosen so that it will be easier to look back at these r The [`ui_exports/ui_export_defaults`][UiExportDefaults] module defines the default shape of the uiExports object produced by `collectUiExports()`. 
The defaults generally describe the `uiExports` from the UI System itself, like default visTypes and such. -[UiApp]: ../ui_apps/ui_app.js "UiApp class definition" [UiExportDefaults]: ./ui_export_defaults.js "uiExport defaults definition" [UiExportTypes]: ./ui_export_types/index.js "Index of default ui_export_types module" [UiAppExportType]: ./ui_export_types/ui_apps.js "UiApp extension type definition" diff --git a/src/legacy/ui/ui_exports/__tests__/collect_ui_exports.js b/src/legacy/ui/ui_exports/__tests__/collect_ui_exports.js deleted file mode 100644 index 5b2af9f82333c..0000000000000 --- a/src/legacy/ui/ui_exports/__tests__/collect_ui_exports.js +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; - -import { PluginPack } from '../../../plugin_discovery'; - -import { collectUiExports } from '../collect_ui_exports'; - -const specs = new PluginPack({ - path: '/dev/null', - pkg: { - name: 'test', - version: 'kibana', - }, - provider({ Plugin }) { - return [ - new Plugin({ - id: 'test', - uiExports: { - savedObjectSchemas: { - foo: { - isNamespaceAgnostic: true, - }, - }, - }, - }), - new Plugin({ - id: 'test2', - uiExports: { - savedObjectSchemas: { - bar: { - isNamespaceAgnostic: true, - }, - }, - }, - }), - ]; - }, -}).getPluginSpecs(); - -describe('plugin discovery', () => { - describe('collectUiExports()', () => { - it('merges uiExports from all provided plugin specs', () => { - const uiExports = collectUiExports(specs); - - expect(uiExports.savedObjectSchemas).to.eql({ - foo: { - isNamespaceAgnostic: true, - }, - bar: { - isNamespaceAgnostic: true, - }, - }); - }); - - it(`throws an error when migrations and mappings aren't defined in the same plugin`, () => { - const invalidSpecs = new PluginPack({ - path: '/dev/null', - pkg: { - name: 'test', - version: 'kibana', - }, - provider({ Plugin }) { - return [ - new Plugin({ - id: 'test', - uiExports: { - mappings: { - 'test-type': { - properties: {}, - }, - }, - }, - }), - new Plugin({ - id: 'test2', - uiExports: { - migrations: { - 'test-type': { - '1.2.3': (doc) => { - return doc; - }, - }, - }, - }, - }), - ]; - }, - }).getPluginSpecs(); - expect(() => collectUiExports(invalidSpecs)).to.throwError((err) => { - expect(err).to.be.a(Error); - expect(err).to.have.property( - 'message', - 'Migrations and mappings must be defined together in the uiExports of a single plugin. ' + - 'test2 defines migrations for types test-type but does not define their mappings.' - ); - }); - }); - }); -}); diff --git a/src/legacy/ui/ui_mixin.js b/src/legacy/ui/ui_mixin.js index 54da001d20669..caa0ca4890661 100644 --- a/src/legacy/ui/ui_mixin.js +++ b/src/legacy/ui/ui_mixin.js @@ -17,10 +17,8 @@ * under the License. 
*/ -import { uiAppsMixin } from './ui_apps'; import { uiRenderMixin } from './ui_render'; export async function uiMixin(kbnServer) { - await kbnServer.mixin(uiAppsMixin); await kbnServer.mixin(uiRenderMixin); } diff --git a/src/legacy/ui/ui_nav_links/__tests__/ui_nav_link.js b/src/legacy/ui/ui_nav_links/__tests__/ui_nav_link.js deleted file mode 100644 index 42368722f11ff..0000000000000 --- a/src/legacy/ui/ui_nav_links/__tests__/ui_nav_link.js +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; - -import { UiNavLink } from '../ui_nav_link'; - -describe('UiNavLink', () => { - describe('constructor', () => { - it('initializes the object properties as expected', () => { - const spec = { - id: 'discover', - title: 'Discover', - order: -1003, - url: '/app/discover#/', - euiIconType: 'discoverApp', - hidden: true, - disabled: true, - }; - - const link = new UiNavLink(spec); - expect(link.toJSON()).to.eql({ - id: spec.id, - title: spec.title, - order: spec.order, - url: spec.url, - subUrlBase: spec.url, - icon: spec.icon, - euiIconType: spec.euiIconType, - hidden: spec.hidden, - disabled: spec.disabled, - category: undefined, - - // defaults - linkToLastSubUrl: true, - disableSubUrlTracking: undefined, - tooltip: '', - }); - }); - - it('initializes the order property to 0 when order is not specified in the spec', () => { - const spec = { - id: 'discover', - title: 'Discover', - url: '/app/discover#/', - }; - const link = new UiNavLink(spec); - - expect(link.toJSON()).to.have.property('order', 0); - }); - - it('initializes the linkToLastSubUrl property to false when false is specified in the spec', () => { - const spec = { - id: 'discover', - title: 'Discover', - order: -1003, - url: '/app/discover#/', - linkToLastSubUrl: false, - }; - const link = new UiNavLink(spec); - - expect(link.toJSON()).to.have.property('linkToLastSubUrl', false); - }); - - it('initializes the linkToLastSubUrl property to true by default', () => { - const spec = { - id: 'discover', - title: 'Discover', - order: -1003, - url: '/app/discover#/', - }; - const link = new UiNavLink(spec); - - expect(link.toJSON()).to.have.property('linkToLastSubUrl', true); - }); - - it('initializes the hidden property to false by default', () => { - const spec = { - id: 'discover', - title: 'Discover', - order: -1003, - url: '/app/discover#/', - }; - const link = new UiNavLink(spec); - - expect(link.toJSON()).to.have.property('hidden', false); - }); - - it('initializes the disabled property to false by default', () => { - const spec = { - id: 'discover', - title: 'Discover', - order: -1003, - url: '/app/discover#/', - }; - const link = new UiNavLink(spec); - - expect(link.toJSON()).to.have.property('disabled', false); - }); - - 
it('initializes the tooltip property to an empty string by default', () => { - const spec = { - id: 'discover', - title: 'Discover', - order: -1003, - url: '/app/discover#/', - }; - const link = new UiNavLink(spec); - - expect(link.toJSON()).to.have.property('tooltip', ''); - }); - }); -}); diff --git a/src/legacy/ui/ui_nav_links/index.js b/src/legacy/ui/ui_nav_links/index.js deleted file mode 100644 index e725a77ee1183..0000000000000 --- a/src/legacy/ui/ui_nav_links/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { UiNavLink } from './ui_nav_link'; diff --git a/src/legacy/ui/ui_nav_links/ui_nav_link.js b/src/legacy/ui/ui_nav_links/ui_nav_link.js deleted file mode 100644 index f56809f7ebb80..0000000000000 --- a/src/legacy/ui/ui_nav_links/ui_nav_link.js +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -export class UiNavLink { - constructor(spec) { - const { - id, - title, - order = 0, - url, - subUrlBase, - disableSubUrlTracking, - icon, - euiIconType, - linkToLastSubUrl = true, - hidden = false, - disabled = false, - tooltip = '', - category, - } = spec; - - this._id = id; - this._title = title; - this._order = order; - this._url = url; - this._subUrlBase = subUrlBase || url; - this._disableSubUrlTracking = disableSubUrlTracking; - this._icon = icon; - this._euiIconType = euiIconType; - this._linkToLastSubUrl = linkToLastSubUrl; - this._hidden = hidden; - this._disabled = disabled; - this._tooltip = tooltip; - this._category = category; - } - - getOrder() { - return this._order; - } - - toJSON() { - return { - id: this._id, - title: this._title, - order: this._order, - url: this._url, - subUrlBase: this._subUrlBase, - icon: this._icon, - euiIconType: this._euiIconType, - linkToLastSubUrl: this._linkToLastSubUrl, - disableSubUrlTracking: this._disableSubUrlTracking, - hidden: this._hidden, - disabled: this._disabled, - tooltip: this._tooltip, - category: this._category, - }; - } -} diff --git a/src/legacy/ui/ui_render/ui_render_mixin.js b/src/legacy/ui/ui_render/ui_render_mixin.js index 8cc2cd1321a62..e3b7c1e0c3ff9 100644 --- a/src/legacy/ui/ui_render/ui_render_mixin.js +++ b/src/legacy/ui/ui_render/ui_render_mixin.js @@ -182,49 +182,36 @@ export function uiRenderMixin(kbnServer, server, config) { path: '/app/{id}/{any*}', method: 'GET', async handler(req, h) { - const id = req.params.id; - const app = server.getUiAppById(id); try { - return await h.renderApp(app); + return await h.renderApp(); } catch (err) { throw Boom.boomify(err); } }, }); - async function renderApp( - h, - app = { getId: () => 'core' }, - includeUserSettings = true, - overrides = {} - ) { + async function renderApp(h) { + const app = { getId: () => 'core' }; const { http } = kbnServer.newPlatform.setup.core; - const { - rendering, - legacy, - savedObjectsClientProvider: savedObjects, - } = kbnServer.newPlatform.__internals; + const { savedObjects } = kbnServer.newPlatform.start.core; + const { rendering, legacy } = kbnServer.newPlatform.__internals; + const req = KibanaRequest.from(h.request); const uiSettings = kbnServer.newPlatform.start.core.uiSettings.asScopedToClient( - savedObjects.getClient(h.request) + savedObjects.getScopedClient(req) ); const vars = await legacy.getVars(app.getId(), h.request, { apmConfig: getApmConfig(h.request.path), - ...overrides, }); const content = await rendering.render(h.request, uiSettings, { app, - includeUserSettings, + includeUserSettings: true, vars, }); return h.response(content).type('text/html').header('content-security-policy', http.csp.header); } - server.decorate('toolkit', 'renderApp', function (app, overrides) { - return renderApp(this, app, true, overrides); - }); - - server.decorate('toolkit', 'renderAppWithDefaultConfig', function (app) { - return renderApp(this, app, false); + server.decorate('toolkit', 'renderApp', function () { + return renderApp(this); }); } diff --git a/src/legacy/utils/artifact_type.ts b/src/legacy/utils/artifact_type.ts index 69f728e9e2220..ef471ef8e050d 100644 --- a/src/legacy/utils/artifact_type.ts +++ b/src/legacy/utils/artifact_type.ts @@ -17,7 +17,6 @@ * under the License. 
*/ -// eslint-disable-next-line @kbn/eslint/no-restricted-paths import { pkg } from '../../core/server/utils'; export const IS_KIBANA_DISTRIBUTABLE = pkg.build && pkg.build.distributable === true; export const IS_KIBANA_RELEASE = pkg.build && pkg.build.release === true; diff --git a/src/legacy/utils/index.d.ts b/src/legacy/utils/index.d.ts index c294c79542bbe..a57caad1d34bf 100644 --- a/src/legacy/utils/index.d.ts +++ b/src/legacy/utils/index.d.ts @@ -18,16 +18,3 @@ */ export function unset(object: object, rawPath: string): void; - -export { - concatStreamProviders, - createConcatStream, - createFilterStream, - createIntersperseStream, - createListStream, - createMapStream, - createPromiseFromStreams, - createReduceStream, - createReplaceStream, - createSplitStream, -} from './streams'; diff --git a/src/legacy/utils/index.js b/src/legacy/utils/index.js index 4274fb2e4901a..e2e2331b3aea6 100644 --- a/src/legacy/utils/index.js +++ b/src/legacy/utils/index.js @@ -17,21 +17,7 @@ * under the License. */ -export { BinderBase } from './binder'; -export { BinderFor } from './binder_for'; export { deepCloneWithBuffers } from './deep_clone_with_buffers'; export { unset } from './unset'; export { IS_KIBANA_DISTRIBUTABLE } from './artifact_type'; export { IS_KIBANA_RELEASE } from './artifact_type'; - -export { - concatStreamProviders, - createConcatStream, - createIntersperseStream, - createListStream, - createPromiseFromStreams, - createReduceStream, - createSplitStream, - createMapStream, - createReplaceStream, -} from './streams'; diff --git a/src/legacy/utils/path_contains.js b/src/legacy/utils/path_contains.js deleted file mode 100644 index 60d05c1099554..0000000000000 --- a/src/legacy/utils/path_contains.js +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { relative } from 'path'; - -export default function pathContains(root, child) { - return relative(child, root).slice(0, 2) !== '..'; -} diff --git a/src/legacy/utils/streams/concat_stream.js b/src/legacy/utils/streams/concat_stream.js deleted file mode 100644 index e3f8f7261d2b7..0000000000000 --- a/src/legacy/utils/streams/concat_stream.js +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
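The deleted `pathContains` helper is a one-liner, but its argument order is worth spelling out: it returns true when `relative(child, root)` does not start with `..`, i.e. when the first argument can be reached from the second without stepping upward. A self-contained sketch with hypothetical paths (POSIX separators assumed):

    import { relative } from 'path';

    // same body as the removed helper
    function pathContains(root, child) {
      return relative(child, root).slice(0, 2) !== '..';
    }

    pathContains('/repo/src/core', '/repo'); // true:  relative('/repo', '/repo/src/core') === 'src/core'
    pathContains('/repo', '/repo/src/core'); // false: relative('/repo/src/core', '/repo') === '../..'
    pathContains('/repo', '/repo');          // true:  relative('/repo', '/repo') === ''
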
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { createReduceStream } from './reduce_stream'; - -/** - * Creates a Transform stream that consumes all provided - * values and concatenates them using each values `concat` - * method. - * - * Concatenate strings: - * createListStream(['f', 'o', 'o']) - * .pipe(createConcatStream()) - * .on('data', console.log) - * // logs "foo" - * - * Concatenate values into an array: - * createListStream([1,2,3]) - * .pipe(createConcatStream([])) - * .on('data', console.log) - * // logs "[1,2,3]" - * - * - * @param {any} initial The initial value that subsequent - * items will concat with - * @return {Transform} - */ -export function createConcatStream(initial) { - return createReduceStream((acc, chunk) => acc.concat(chunk), initial); -} diff --git a/src/legacy/utils/streams/concat_stream_providers.js b/src/legacy/utils/streams/concat_stream_providers.js deleted file mode 100644 index 11dfb84284df3..0000000000000 --- a/src/legacy/utils/streams/concat_stream_providers.js +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { PassThrough } from 'stream'; - -/** - * Write the data and errors from a list of stream providers - * to a single stream in order. Stream providers are only - * called right before they will be consumed, and only one - * provider will be active at a time. 
- * - * @param {Array<() => ReadableStream>} sourceProviders - * @param {PassThroughOptions} options options passed to the PassThrough constructor - * @return {WritableStream} combined stream - */ -export function concatStreamProviders(sourceProviders, options = {}) { - const destination = new PassThrough(options); - const queue = sourceProviders.slice(); - - (function pipeNext() { - const provider = queue.shift(); - - if (!provider) { - return; - } - - const source = provider(); - const isLast = !queue.length; - - // if there are more sources to pipe, hook - // into the source completion - if (!isLast) { - source.once('end', pipeNext); - } - - source - // proxy errors from the source to the destination - .once('error', (error) => destination.emit('error', error)) - // pipe the source to the destination but only proxy the - // end event if this is the last source - .pipe(destination, { end: isLast }); - })(); - - return destination; -} diff --git a/src/legacy/utils/streams/index.d.ts b/src/legacy/utils/streams/index.d.ts deleted file mode 100644 index 470b5d9fa3505..0000000000000 --- a/src/legacy/utils/streams/index.d.ts +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { Readable, Writable, Transform, TransformOptions } from 'stream'; - -export function concatStreamProviders( - sourceProviders: Array<() => Readable>, - options: TransformOptions -): Transform; -export function createIntersperseStream(intersperseChunk: string | Buffer): Transform; -export function createSplitStream(splitChunk: T): Transform; -export function createListStream(items: any | any[]): Readable; -export function createReduceStream(reducer: (value: any, chunk: T, enc: string) => T): Transform; -export function createPromiseFromStreams([first, ...rest]: [Readable, ...Writable[]]): Promise< - T ->; -export function createConcatStream(initial?: any): Transform; -export function createMapStream(fn: (value: T, i: number) => void): Transform; -export function createReplaceStream(toReplace: string, replacement: string | Buffer): Transform; -export function createFilterStream(fn: (obj: T) => boolean): Transform; diff --git a/src/legacy/utils/streams/intersperse_stream.js b/src/legacy/utils/streams/intersperse_stream.js deleted file mode 100644 index 5f9f0b03cd7eb..0000000000000 --- a/src/legacy/utils/streams/intersperse_stream.js +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
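A hypothetical end-to-end use of the removed `concatStreamProviders`, combined with the list/concat/promise helpers that are also deleted in this patch, assuming they were still importable from the legacy streams module:

    import {
      concatStreamProviders,
      createListStream,
      createConcatStream,
      createPromiseFromStreams,
    } from './streams';

    const combined = concatStreamProviders(
      [() => createListStream(['foo', 'bar']), () => createListStream(['baz'])],
      { objectMode: true } // options are forwarded to the underlying PassThrough
    );

    const joined = await createPromiseFromStreams([combined, createConcatStream('')]);
    // joined === 'foobarbaz'
    // each provider is only invoked once the previous source has ended
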
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { Transform } from 'stream'; - -/** - * Create a Transform stream that receives values in object mode, - * and intersperses a chunk between each object received. - * - * This is useful for writing lists: - * - * createListStream(['foo', 'bar']) - * .pipe(createIntersperseStream('\n')) - * .pipe(process.stdout) // outputs "foo\nbar" - * - * Combine with a concat stream to get "join" like functionality: - * - * await createPromiseFromStreams([ - * createListStream(['foo', 'bar']), - * createIntersperseStream(' '), - * createConcatStream() - * ]) // produces a single value "foo bar" - * - * @param {String|Buffer} intersperseChunk - * @return {Transform} - */ -export function createIntersperseStream(intersperseChunk) { - let first = true; - - return new Transform({ - writableObjectMode: true, - readableObjectMode: true, - transform(chunk, enc, callback) { - try { - if (first) { - first = false; - } else { - this.push(intersperseChunk); - } - - this.push(chunk); - callback(null); - } catch (err) { - callback(err); - } - }, - }); -} diff --git a/src/legacy/utils/streams/list_stream.js b/src/legacy/utils/streams/list_stream.js deleted file mode 100644 index a614620b054b7..0000000000000 --- a/src/legacy/utils/streams/list_stream.js +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { Readable } from 'stream'; - -/** - * Create a Readable stream that provides the items - * from a list as objects to subscribers - * - * @param {Array} items - the list of items to provide - * @return {Readable} - */ -export function createListStream(items = []) { - const queue = [].concat(items); - - return new Readable({ - objectMode: true, - read(size) { - queue.splice(0, size).forEach((item) => { - this.push(item); - }); - - if (!queue.length) { - this.push(null); - } - }, - }); -} diff --git a/src/legacy/utils/streams/map_stream.js b/src/legacy/utils/streams/map_stream.js deleted file mode 100644 index 4e906471330f1..0000000000000 --- a/src/legacy/utils/streams/map_stream.js +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { Transform } from 'stream'; - -export function createMapStream(fn) { - let i = 0; - - return new Transform({ - objectMode: true, - async transform(value, enc, done) { - try { - this.push(await fn(value, i++)); - done(); - } catch (err) { - done(err); - } - }, - }); -} diff --git a/src/legacy/utils/streams/promise_from_streams.js b/src/legacy/utils/streams/promise_from_streams.js deleted file mode 100644 index 05f6a08aa1a09..0000000000000 --- a/src/legacy/utils/streams/promise_from_streams.js +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * Take an array of streams, pipe the output - * from each one into the next, listening for - * errors from any of the streams, and then resolve - * the promise once the final stream has finished - * writing/reading. - * - * If the last stream is readable, it's final value - * will be provided as the promise value. - * - * Errors emitted from any stream will cause - * the promise to be rejected with that error. - * - * @param {Array} streams - * @return {Promise} - */ - -import { pipeline, Writable } from 'stream'; - -export async function createPromiseFromStreams(streams) { - let finalChunk; - const last = streams[streams.length - 1]; - if (typeof last.read !== 'function' && streams.length === 1) { - // For a nicer error than what stream.pipeline throws - throw new Error('A minimum of 2 streams is required when a non-readable stream is given'); - } - if (typeof last.read === 'function') { - // We are pushing a writable stream to capture the last chunk - streams.push( - new Writable({ - // Use object mode even when "last" stream isn't. This allows to - // capture the last chunk as-is. 
- objectMode: true, - write(chunk, enc, done) { - finalChunk = chunk; - done(); - }, - }) - ); - } - return new Promise((resolve, reject) => { - pipeline(...streams, (err) => { - if (err) return reject(err); - resolve(finalChunk); - }); - }); -} diff --git a/src/legacy/utils/streams/reduce_stream.js b/src/legacy/utils/streams/reduce_stream.js deleted file mode 100644 index d66b0124d1dab..0000000000000 --- a/src/legacy/utils/streams/reduce_stream.js +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { Transform } from 'stream'; - -/** - * Create a transform stream that consumes each chunk it receives - * and passes it to the reducer, which will return the new value - * for the stream. Once all chunks have been received the reduce - * stream provides the result of final call to the reducer to - * subscribers. - * - * @param {Function} - * @param {any} initial Initial value for the stream, if undefined - * then the first chunk provided is used as the - * initial value. - * @return {Transform} - */ -export function createReduceStream(reducer, initial) { - let i = -1; - let value = initial; - - // if the reducer throws an error then the value is - // considered invalid and the stream will never provide - // it to subscribers. We will also stop calling the - // reducer for any new data that is provided to us - let failed = false; - - if (typeof reducer !== 'function') { - throw new TypeError('reducer must be a function'); - } - - return new Transform({ - readableObjectMode: true, - writableObjectMode: true, - async transform(chunk, enc, callback) { - try { - if (failed) { - return callback(); - } - - i += 1; - if (i === 0 && initial === undefined) { - value = chunk; - } else { - value = await reducer(value, chunk, enc); - } - - callback(); - } catch (err) { - failed = true; - callback(err); - } - }, - - flush(callback) { - if (!failed) { - this.push(value); - } - - callback(); - }, - }); -} diff --git a/src/legacy/utils/streams/replace_stream.js b/src/legacy/utils/streams/replace_stream.js deleted file mode 100644 index 7309bd241fa52..0000000000000 --- a/src/legacy/utils/streams/replace_stream.js +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
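Taken together, the promise/map/reduce helpers removed above compose like this; a hypothetical usage sketch, again assuming the deleted streams module were still importable:

    import {
      createListStream,
      createMapStream,
      createReduceStream,
      createPromiseFromStreams,
    } from './streams';

    const total = await createPromiseFromStreams([
      createListStream([1, 2, 3]),
      createMapStream((n) => n * 2),
      createReduceStream((acc, n) => acc + n, 0),
    ]);
    // total === 12; the promise resolves with the final value read from the last stream
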
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { Transform } from 'stream'; - -export function createReplaceStream(toReplace, replacement) { - if (typeof toReplace !== 'string') { - throw new TypeError('toReplace must be a string'); - } - - let buffer = Buffer.alloc(0); - return new Transform({ - objectMode: false, - async transform(value, enc, done) { - try { - buffer = Buffer.concat([buffer, value], buffer.length + value.length); - - while (true) { - // try to find the next instance of `toReplace` in buffer - const index = buffer.indexOf(toReplace); - - // if there is no next instance, break - if (index === -1) { - break; - } - - // flush everything to the left of the next instance - // of `toReplace` - this.push(buffer.slice(0, index)); - - // then flush an instance of `replacement` - this.push(replacement); - - // and finally update the buffer to include everything - // to the right of `toReplace`, dropping to replace from the buffer - buffer = buffer.slice(index + toReplace.length); - } - - // until now we have only flushed data that is to the left - // of a discovered instance of `toReplace`. If `toReplace` is - // never found this would lead to us buffering the entire stream. - // - // Instead, we only keep enough buffer to complete a potentially - // partial instance of `toReplace` - if (buffer.length > toReplace.length) { - // the entire buffer except the last `toReplace.length` bytes - // so that if all but one byte from `toReplace` is in the buffer, - // and the next chunk delivers the necessary byte, the buffer will then - // contain a complete `toReplace` token. - this.push(buffer.slice(0, buffer.length - toReplace.length)); - buffer = buffer.slice(-toReplace.length); - } - - done(); - } catch (err) { - done(err); - } - }, - - flush(callback) { - if (buffer.length) { - this.push(buffer); - } - - buffer = null; - callback(); - }, - }); -} diff --git a/src/legacy/utils/streams/split_stream.js b/src/legacy/utils/streams/split_stream.js deleted file mode 100644 index f55cbc7bd290d..0000000000000 --- a/src/legacy/utils/streams/split_stream.js +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { Transform } from 'stream'; - -/** - * Creates a Transform stream that consumes a stream of Buffers - * and produces a stream of strings (in object mode) by splitting - * the received bytes using the splitChunk. 
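`createReplaceStream` (deleted just above) does a streaming find-and-replace over raw bytes, keeping a short partial-match tail buffered so a token split across chunks is still found. A small sketch of the observable behaviour, assuming the removed helpers were importable; the sample strings are hypothetical:

    import {
      createListStream,
      createReplaceStream,
      createConcatStream,
      createPromiseFromStreams,
    } from './streams';

    const replaced = await createPromiseFromStreams([
      // 'dark' is deliberately split across the two chunks
      createListStream([Buffer.from('some da'), Buffer.from('rk text, dark mode')]),
      createReplaceStream('dark', 'light'),
      createConcatStream(''),
    ]);
    // replaced === 'some light text, light mode'
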
- * - * Ways this behaves like String#split: - * - instances of splitChunk are removed from the input - * - splitChunk can be of any size - * - if there are no bytes found after the last splitChunk - * a final empty chunk is emitted - * - * Ways this deviates from String#split: - * - splitChunk cannot be a regexp - * - an empty string or Buffer will not produce a stream of individual - * bytes like `string.split('')` would - * - * @param {String} splitChunk - * @return {Transform} - */ -export function createSplitStream(splitChunk) { - let unsplitBuffer = Buffer.alloc(0); - - return new Transform({ - writableObjectMode: false, - readableObjectMode: true, - transform(chunk, enc, callback) { - try { - let i; - let toSplit = Buffer.concat([unsplitBuffer, chunk]); - while ((i = toSplit.indexOf(splitChunk)) !== -1) { - const slice = toSplit.slice(0, i); - toSplit = toSplit.slice(i + splitChunk.length); - this.push(slice.toString('utf8')); - } - - unsplitBuffer = toSplit; - callback(null); - } catch (err) { - callback(err); - } - }, - - flush(callback) { - try { - this.push(unsplitBuffer.toString('utf8')); - - callback(null); - } catch (err) { - callback(err); - } - }, - }); -} diff --git a/src/plugins/advanced_settings/public/management_app/advanced_settings.test.tsx b/src/plugins/advanced_settings/public/management_app/advanced_settings.test.tsx index 6103041cf0a4c..68a21c6a1587c 100644 --- a/src/plugins/advanced_settings/public/management_app/advanced_settings.test.tsx +++ b/src/plugins/advanced_settings/public/management_app/advanced_settings.test.tsx @@ -32,10 +32,6 @@ import { AdvancedSettingsComponent } from './advanced_settings'; import { notificationServiceMock, docLinksServiceMock } from '../../../../core/public/mocks'; import { ComponentRegistry } from '../component_registry'; -jest.mock('ui/new_platform', () => ({ - npStart: mockConfig(), -})); - jest.mock('./components/field', () => ({ Field: () => { return 'field'; diff --git a/src/plugins/console/server/lib/spec_definitions/js/mappings.ts b/src/plugins/console/server/lib/spec_definitions/js/mappings.ts index d09637b05a3cb..aa09278d07553 100644 --- a/src/plugins/console/server/lib/spec_definitions/js/mappings.ts +++ b/src/plugins/console/server/lib/spec_definitions/js/mappings.ts @@ -17,8 +17,6 @@ * under the License.
*/ -import _ from 'lodash'; - import { SpecDefinitionsService } from '../../../services'; import { BOOLEAN } from './shared'; @@ -159,28 +157,25 @@ export const mappings = (specService: SpecDefinitionsService) => { // dates format: { - __one_of: _.flatten([ - _.map( - [ - 'date', - 'date_time', - 'date_time_no_millis', - 'ordinal_date', - 'ordinal_date_time', - 'ordinal_date_time_no_millis', - 'time', - 'time_no_millis', - 't_time', - 't_time_no_millis', - 'week_date', - 'week_date_time', - 'week_date_time_no_millis', - ], - function (s) { - return ['basic_' + s, 'strict_' + s]; - } - ), - [ + __one_of: [ + ...[ + 'date', + 'date_time', + 'date_time_no_millis', + 'ordinal_date', + 'ordinal_date_time', + 'ordinal_date_time_no_millis', + 'time', + 'time_no_millis', + 't_time', + 't_time_no_millis', + 'week_date', + 'week_date_time', + 'week_date_time_no_millis', + ].map(function (s) { + return ['basic_' + s, 'strict_' + s]; + }), + ...[ 'date', 'date_hour', 'date_hour_minute', @@ -214,7 +209,7 @@ export const mappings = (specService: SpecDefinitionsService) => { 'epoch_millis', 'epoch_second', ], - ]), + ], }, fielddata: { diff --git a/src/plugins/dashboard/public/application/actions/index.ts b/src/plugins/dashboard/public/application/actions/index.ts index 4343a3409b696..cd32c2025456f 100644 --- a/src/plugins/dashboard/public/application/actions/index.ts +++ b/src/plugins/dashboard/public/application/actions/index.ts @@ -42,3 +42,8 @@ export { UnlinkFromLibraryActionContext, ACTION_UNLINK_FROM_LIBRARY, } from './unlink_from_library_action'; +export { + LibraryNotificationActionContext, + LibraryNotificationAction, + ACTION_LIBRARY_NOTIFICATION, +} from './library_notification_action'; diff --git a/src/plugins/dashboard/public/application/actions/library_notification_action.test.tsx b/src/plugins/dashboard/public/application/actions/library_notification_action.test.tsx new file mode 100644 index 0000000000000..385f6f14ba94c --- /dev/null +++ b/src/plugins/dashboard/public/application/actions/library_notification_action.test.tsx @@ -0,0 +1,100 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { isErrorEmbeddable, ReferenceOrValueEmbeddable } from '../../embeddable_plugin'; +import { DashboardContainer } from '../embeddable'; +import { getSampleDashboardInput } from '../test_helpers'; +import { + CONTACT_CARD_EMBEDDABLE, + ContactCardEmbeddableFactory, + ContactCardEmbeddable, + ContactCardEmbeddableInput, + ContactCardEmbeddableOutput, +} from '../../embeddable_plugin_test_samples'; +import { coreMock } from '../../../../../core/public/mocks'; +import { CoreStart } from 'kibana/public'; +import { LibraryNotificationAction } from '.'; +import { embeddablePluginMock } from 'src/plugins/embeddable/public/mocks'; +import { ViewMode } from '../../../../embeddable/public'; + +const { setup, doStart } = embeddablePluginMock.createInstance(); +setup.registerEmbeddableFactory( + CONTACT_CARD_EMBEDDABLE, + new ContactCardEmbeddableFactory((() => null) as any, {} as any) +); +const start = doStart(); + +let container: DashboardContainer; +let embeddable: ContactCardEmbeddable & ReferenceOrValueEmbeddable; +let coreStart: CoreStart; +beforeEach(async () => { + coreStart = coreMock.createStart(); + + const containerOptions = { + ExitFullScreenButton: () => null, + SavedObjectFinder: () => null, + application: {} as any, + embeddable: start, + inspector: {} as any, + notifications: {} as any, + overlays: coreStart.overlays, + savedObjectMetaData: {} as any, + uiActions: {} as any, + }; + + container = new DashboardContainer(getSampleDashboardInput(), containerOptions); + + const contactCardEmbeddable = await container.addNewEmbeddable< + ContactCardEmbeddableInput, + ContactCardEmbeddableOutput, + ContactCardEmbeddable + >(CONTACT_CARD_EMBEDDABLE, { + firstName: 'Kibanana', + }); + + if (isErrorEmbeddable(contactCardEmbeddable)) { + throw new Error('Failed to create embeddable'); + } + embeddable = embeddablePluginMock.mockRefOrValEmbeddable< + ContactCardEmbeddable, + ContactCardEmbeddableInput + >(contactCardEmbeddable, { + mockedByReferenceInput: { savedObjectId: 'testSavedObjectId', id: contactCardEmbeddable.id }, + mockedByValueInput: { firstName: 'Kibanana', id: contactCardEmbeddable.id }, + }); + embeddable.updateInput({ viewMode: ViewMode.EDIT }); +}); + +test('Notification is shown when embeddable on dashboard has reference type input', async () => { + const action = new LibraryNotificationAction(); + embeddable.updateInput(await embeddable.getInputAsRefType()); + expect(await action.isCompatible({ embeddable })).toBe(true); +}); + +test('Notification is not shown when embeddable input is by value', async () => { + const action = new LibraryNotificationAction(); + embeddable.updateInput(await embeddable.getInputAsValueType()); + expect(await action.isCompatible({ embeddable })).toBe(false); +}); + +test('Notification is not shown when view mode is set to view', async () => { + const action = new LibraryNotificationAction(); + embeddable.updateInput(await embeddable.getInputAsRefType()); + embeddable.updateInput({ viewMode: ViewMode.VIEW }); + expect(await action.isCompatible({ embeddable })).toBe(false); +}); diff --git a/src/plugins/dashboard/public/application/actions/library_notification_action.tsx b/src/plugins/dashboard/public/application/actions/library_notification_action.tsx new file mode 100644 index 0000000000000..974b55275ccc1 --- /dev/null +++ b/src/plugins/dashboard/public/application/actions/library_notification_action.tsx @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React from 'react'; +import { i18n } from '@kbn/i18n'; +import { EuiBadge } from '@elastic/eui'; +import { IEmbeddable, ViewMode, isReferenceOrValueEmbeddable } from '../../embeddable_plugin'; +import { ActionByType, IncompatibleActionError } from '../../ui_actions_plugin'; +import { reactToUiComponent } from '../../../../kibana_react/public'; + +export const ACTION_LIBRARY_NOTIFICATION = 'ACTION_LIBRARY_NOTIFICATION'; + +export interface LibraryNotificationActionContext { + embeddable: IEmbeddable; +} + +export class LibraryNotificationAction implements ActionByType { + public readonly id = ACTION_LIBRARY_NOTIFICATION; + public readonly type = ACTION_LIBRARY_NOTIFICATION; + public readonly order = 1; + + private displayName = i18n.translate('dashboard.panel.LibraryNotification', { + defaultMessage: 'Library', + }); + + private icon = 'folderCheck'; + + public readonly MenuItem = reactToUiComponent(() => ( + + {this.displayName} + + )); + + public getDisplayName({ embeddable }: LibraryNotificationActionContext) { + if (!embeddable.getRoot() || !embeddable.getRoot().isContainer) { + throw new IncompatibleActionError(); + } + return this.displayName; + } + + public getIconType({ embeddable }: LibraryNotificationActionContext) { + if (!embeddable.getRoot() || !embeddable.getRoot().isContainer) { + throw new IncompatibleActionError(); + } + return this.icon; + } + + public getDisplayNameTooltip = ({ embeddable }: LibraryNotificationActionContext) => { + if (!embeddable.getRoot() || !embeddable.getRoot().isContainer) { + throw new IncompatibleActionError(); + } + return i18n.translate('dashboard.panel.libraryNotification.toolTip', { + defaultMessage: + 'This panel is linked to a Library item. 
Editing the panel might affect other dashboards.', + }); + }; + + public isCompatible = async ({ embeddable }: LibraryNotificationActionContext) => { + return ( + embeddable.getInput()?.viewMode !== ViewMode.VIEW && + isReferenceOrValueEmbeddable(embeddable) && + embeddable.inputIsRefType(embeddable.getInput()) + ); + }; + + public execute = async () => {}; +} diff --git a/src/plugins/dashboard/public/application/actions/open_replace_panel_flyout.tsx b/src/plugins/dashboard/public/application/actions/open_replace_panel_flyout.tsx index c676ca052d687..54a294fd2f4ac 100644 --- a/src/plugins/dashboard/public/application/actions/open_replace_panel_flyout.tsx +++ b/src/plugins/dashboard/public/application/actions/open_replace_panel_flyout.tsx @@ -60,7 +60,8 @@ export async function openReplacePanelFlyout(options: { /> ), { - 'data-test-subj': 'replacePanelFlyout', + 'data-test-subj': 'dashboardReplacePanel', + ownFocus: true, } ); } diff --git a/src/plugins/dashboard/public/application/actions/replace_panel_flyout.tsx b/src/plugins/dashboard/public/application/actions/replace_panel_flyout.tsx index 0000f63c48c2d..4e228bc1a7a06 100644 --- a/src/plugins/dashboard/public/application/actions/replace_panel_flyout.tsx +++ b/src/plugins/dashboard/public/application/actions/replace_panel_flyout.tsx @@ -19,16 +19,15 @@ import { i18n } from '@kbn/i18n'; import React from 'react'; -import _ from 'lodash'; -import { EuiFlyout, EuiFlyoutBody, EuiFlyoutHeader, EuiTitle } from '@elastic/eui'; +import { EuiFlyoutBody, EuiFlyoutHeader, EuiTitle } from '@elastic/eui'; import { NotificationsStart, Toast } from 'src/core/public'; import { DashboardPanelState } from '../embeddable'; import { - IContainer, - IEmbeddable, EmbeddableInput, EmbeddableOutput, EmbeddableStart, + IContainer, + IEmbeddable, SavedObjectEmbeddableInput, } from '../../embeddable_plugin'; @@ -122,7 +121,7 @@ export class ReplacePanelFlyout extends React.Component { const panelToReplace = 'Replace panel ' + this.props.panelToRemove.getTitle() + ' with:'; return ( - + <>

@@ -131,7 +130,7 @@ export class ReplacePanelFlyout extends React.Component { {savedObjectsFinder} - + ); } } diff --git a/src/plugins/dashboard/public/application/dashboard_app_controller.tsx b/src/plugins/dashboard/public/application/dashboard_app_controller.tsx index 0d20fdee07df5..212b54be9ae04 100644 --- a/src/plugins/dashboard/public/application/dashboard_app_controller.tsx +++ b/src/plugins/dashboard/public/application/dashboard_app_controller.tsx @@ -24,9 +24,18 @@ import { EuiCheckboxGroupIdToSelectedMap } from '@elastic/eui/src/components/for import React, { useState, ReactElement } from 'react'; import ReactDOM from 'react-dom'; import angular from 'angular'; +import deepEqual from 'fast-deep-equal'; import { Observable, pipe, Subscription, merge } from 'rxjs'; -import { filter, map, debounceTime, mapTo, startWith, switchMap } from 'rxjs/operators'; +import { + filter, + map, + debounceTime, + mapTo, + startWith, + switchMap, + distinctUntilChanged, +} from 'rxjs/operators'; import { History } from 'history'; import { SavedObjectSaveOpts } from 'src/plugins/saved_objects/public'; import { NavigationPublicPluginStart as NavigationStart } from 'src/plugins/navigation/public'; @@ -279,6 +288,12 @@ export class DashboardAppController { const updateIndexPatternsOperator = pipe( filter((container: DashboardContainer) => !!container && !isErrorEmbeddable(container)), map(getDashboardIndexPatterns), + distinctUntilChanged((a, b) => + deepEqual( + a.map((ip) => ip.id), + b.map((ip) => ip.id) + ) + ), // using switchMap for previous task cancellation switchMap((panelIndexPatterns: IndexPattern[]) => { return new Observable((observer) => { @@ -405,17 +420,29 @@ export class DashboardAppController { ) : null; }; - outputSubscription = new Subscription(); - outputSubscription.add( - dashboardContainer - .getOutput$() - .pipe( - mapTo(dashboardContainer), - startWith(dashboardContainer), // to trigger initial index pattern update - updateIndexPatternsOperator + outputSubscription = merge( + // output of dashboard container itself + dashboardContainer.getOutput$(), + // plus output of dashboard container children, + // children may change, so make sure we subscribe/unsubscribe with switchMap + dashboardContainer.getOutput$().pipe( + map(() => dashboardContainer!.getChildIds()), + distinctUntilChanged(deepEqual), + switchMap((newChildIds: string[]) => + merge( + ...newChildIds.map((childId) => + dashboardContainer!.getChild(childId).getOutput$() + ) + ) ) - .subscribe() - ); + ) + ) + .pipe( + mapTo(dashboardContainer), + startWith(dashboardContainer), // to trigger initial index pattern update + updateIndexPatternsOperator + ) + .subscribe(); inputSubscription = dashboardContainer.getInput$().subscribe(() => { let dirty = false; diff --git a/src/plugins/dashboard/public/attribute_service/attribute_service.test.ts b/src/plugins/dashboard/public/attribute_service/attribute_service.test.ts new file mode 100644 index 0000000000000..06f380ca3862b --- /dev/null +++ b/src/plugins/dashboard/public/attribute_service/attribute_service.test.ts @@ -0,0 +1,193 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
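The reworked subscription above merges the dashboard container's own output with the outputs of its children and re-subscribes whenever the set of child ids changes. A self-contained sketch of that pattern (illustrative only; MiniContainer is a stand-in interface, not a real Kibana type):

import deepEqual from 'fast-deep-equal';
import { merge, Observable } from 'rxjs';
import { distinctUntilChanged, map, switchMap } from 'rxjs/operators';

// Stand-in for the container API used above (getOutput$ / getChildIds / getChild).
interface MiniContainer {
  getOutput$(): Observable<unknown>;
  getChildIds(): string[];
  getChild(id: string): { getOutput$(): Observable<unknown> };
}

function containerAndChildrenOutput$(container: MiniContainer): Observable<unknown> {
  return merge(
    // output of the container itself
    container.getOutput$(),
    // outputs of its children; switchMap drops subscriptions to removed children
    container.getOutput$().pipe(
      map(() => container.getChildIds()),
      distinctUntilChanged(deepEqual),
      switchMap((childIds) => merge(...childIds.map((id) => container.getChild(id).getOutput$())))
    )
  );
}

The deep comparison in distinctUntilChanged keeps the child subscriptions stable: they are torn down and re-created only when the list of child ids actually changes, not on every output emission.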
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { ATTRIBUTE_SERVICE_KEY } from './attribute_service'; +import { mockAttributeService } from './attribute_service_mock'; +import { coreMock } from '../../../../core/public/mocks'; + +interface TestAttributes { + title: string; + testAttr1?: string; + testAttr2?: { array: unknown[]; testAttr3: string }; +} + +interface TestByValueInput { + id: string; + [ATTRIBUTE_SERVICE_KEY]: TestAttributes; +} + +describe('attributeService', () => { + const defaultTestType = 'defaultTestType'; + let attributes: TestAttributes; + let byValueInput: TestByValueInput; + let byReferenceInput: { id: string; savedObjectId: string }; + + beforeEach(() => { + attributes = { + title: 'ultra title', + testAttr1: 'neat first attribute', + testAttr2: { array: [1, 2, 3], testAttr3: 'super attribute' }, + }; + byValueInput = { + id: '456', + attributes, + }; + byReferenceInput = { + id: '456', + savedObjectId: '123', + }; + }); + + describe('determining input type', () => { + const defaultAttributeService = mockAttributeService(defaultTestType); + const customAttributeService = mockAttributeService( + defaultTestType + ); + + it('can determine input type given default types', () => { + expect( + defaultAttributeService.inputIsRefType({ id: '456', savedObjectId: '123' }) + ).toBeTruthy(); + expect( + defaultAttributeService.inputIsRefType({ + id: '456', + attributes: { title: 'wow I am by value' }, + }) + ).toBeFalsy(); + }); + it('can determine input type given custom types', () => { + expect( + customAttributeService.inputIsRefType({ id: '456', savedObjectId: '123' }) + ).toBeTruthy(); + expect( + customAttributeService.inputIsRefType({ + id: '456', + [ATTRIBUTE_SERVICE_KEY]: { title: 'wow I am by value' }, + }) + ).toBeFalsy(); + }); + }); + + describe('unwrapping attributes', () => { + it('can unwrap all default attributes when given reference type input', async () => { + const core = coreMock.createStart(); + core.savedObjects.client.get = jest.fn().mockResolvedValueOnce({ + attributes, + }); + const attributeService = mockAttributeService( + defaultTestType, + undefined, + core + ); + expect(await attributeService.unwrapAttributes(byReferenceInput)).toEqual(attributes); + }); + + it('returns attributes when given value type input', async () => { + const attributeService = mockAttributeService(defaultTestType); + expect(await attributeService.unwrapAttributes(byValueInput)).toEqual(attributes); + }); + + it('runs attributes through a custom unwrap method', async () => { + const core = coreMock.createStart(); + core.savedObjects.client.get = jest.fn().mockResolvedValueOnce({ + attributes, + }); + const attributeService = mockAttributeService( + defaultTestType, + { + customUnwrapMethod: (savedObject) => ({ + ...savedObject.attributes, + testAttr2: { array: [1, 2, 3, 4, 5], testAttr3: 'kibanana' }, + }), + }, + core + ); + expect(await attributeService.unwrapAttributes(byReferenceInput)).toEqual({ + ...attributes, + testAttr2: { array: [1, 2, 3, 4, 5], testAttr3: 'kibanana' }, + }); + }); + }); + + describe('wrapping attributes', () => { + it('returns given attributes when use
ref type is false', async () => { + const attributeService = mockAttributeService(defaultTestType); + expect(await attributeService.wrapAttributes(attributes, false)).toEqual({ attributes }); + }); + + it('updates existing saved object with new attributes when given id', async () => { + const core = coreMock.createStart(); + const attributeService = mockAttributeService( + defaultTestType, + undefined, + core + ); + expect(await attributeService.wrapAttributes(attributes, true, byReferenceInput)).toEqual( + byReferenceInput + ); + expect(core.savedObjects.client.update).toHaveBeenCalledWith( + defaultTestType, + '123', + attributes + ); + }); + + it('creates new saved object with attributes when given no id', async () => { + const core = coreMock.createStart(); + core.savedObjects.client.create = jest.fn().mockResolvedValueOnce({ + id: '678', + }); + const attributeService = mockAttributeService( + defaultTestType, + undefined, + core + ); + expect(await attributeService.wrapAttributes(attributes, true)).toEqual({ + savedObjectId: '678', + }); + expect(core.savedObjects.client.create).toHaveBeenCalledWith(defaultTestType, attributes); + }); + + it('uses custom save method when given an id', async () => { + const customSaveMethod = jest.fn().mockReturnValue({ id: '123' }); + const attributeService = mockAttributeService(defaultTestType, { + customSaveMethod, + }); + expect(await attributeService.wrapAttributes(attributes, true, byReferenceInput)).toEqual( + byReferenceInput + ); + expect(customSaveMethod).toHaveBeenCalledWith( + defaultTestType, + attributes, + byReferenceInput.savedObjectId + ); + }); + + it('uses custom save method given no id', async () => { + const customSaveMethod = jest.fn().mockReturnValue({ id: '678' }); + const attributeService = mockAttributeService(defaultTestType, { + customSaveMethod, + }); + expect(await attributeService.wrapAttributes(attributes, true)).toEqual({ + savedObjectId: '678', + }); + expect(customSaveMethod).toHaveBeenCalledWith(defaultTestType, attributes, undefined); + }); + }); +}); diff --git a/src/plugins/dashboard/public/attribute_service/attribute_service.tsx b/src/plugins/dashboard/public/attribute_service/attribute_service.tsx index fe5f6a0c8e2bd..a36363d22d87d 100644 --- a/src/plugins/dashboard/public/attribute_service/attribute_service.tsx +++ b/src/plugins/dashboard/public/attribute_service/attribute_service.tsx @@ -19,11 +19,16 @@ import React from 'react'; import { i18n } from '@kbn/i18n'; +import { get } from 'lodash'; import { EmbeddableInput, SavedObjectEmbeddableInput, isSavedObjectEmbeddableInput, IEmbeddable, + Container, + EmbeddableStart, + EmbeddableFactory, + EmbeddableFactoryNotFoundError, } from '../embeddable_plugin'; import { SavedObjectsClientContract, @@ -34,17 +39,10 @@ import { } from '../../../../core/public'; import { SavedObjectSaveModal, - showSaveModal, OnSaveProps, SaveResult, checkForDuplicateTitle, } from '../../../saved_objects/public'; -import { - EmbeddableStart, - EmbeddableFactory, - EmbeddableFactoryNotFoundError, - Container, -} from '../../../embeddable/public'; /** * The attribute service is a shared, generic service that embeddables can use to provide the functionality @@ -52,26 +50,46 @@ import { * can also be used as a higher level wrapper to transform an embeddable input shape that references a saved object * into an embeddable input shape that contains that saved object's attributes by value. 
*/ +export const ATTRIBUTE_SERVICE_KEY = 'attributes'; + +export interface AttributeServiceOptions { + customSaveMethod?: ( + type: string, + attributes: A, + savedObjectId?: string + ) => Promise<{ id: string }>; + customUnwrapMethod?: (savedObject: SimpleSavedObject) => A; +} + export class AttributeService< SavedObjectAttributes extends { title: string }, - ValType extends EmbeddableInput & { attributes: SavedObjectAttributes }, - RefType extends SavedObjectEmbeddableInput + ValType extends EmbeddableInput & { + [ATTRIBUTE_SERVICE_KEY]: SavedObjectAttributes; + } = EmbeddableInput & { [ATTRIBUTE_SERVICE_KEY]: SavedObjectAttributes }, + RefType extends SavedObjectEmbeddableInput = SavedObjectEmbeddableInput > { - private embeddableFactory: EmbeddableFactory; + private embeddableFactory?: EmbeddableFactory; constructor( private type: string, + private showSaveModal: ( + saveModal: React.ReactElement, + I18nContext: I18nStart['Context'] + ) => void, private savedObjectsClient: SavedObjectsClientContract, private overlays: OverlayStart, private i18nContext: I18nStart['Context'], private toasts: NotificationsStart['toasts'], - getEmbeddableFactory: EmbeddableStart['getEmbeddableFactory'] + getEmbeddableFactory?: EmbeddableStart['getEmbeddableFactory'], + private options?: AttributeServiceOptions ) { - const factory = getEmbeddableFactory(this.type); - if (!factory) { - throw new EmbeddableFactoryNotFoundError(this.type); + if (getEmbeddableFactory) { + const factory = getEmbeddableFactory(this.type); + if (!factory) { + throw new EmbeddableFactoryNotFoundError(this.type); + } + this.embeddableFactory = factory; } - this.embeddableFactory = factory; } public async unwrapAttributes(input: RefType | ValType): Promise { @@ -79,43 +97,54 @@ export class AttributeService< const savedObject: SimpleSavedObject = await this.savedObjectsClient.get< SavedObjectAttributes >(this.type, input.savedObjectId); - return savedObject.attributes; + return this.options?.customUnwrapMethod + ? this.options?.customUnwrapMethod(savedObject) + : { ...savedObject.attributes }; } - return input.attributes; + return input[ATTRIBUTE_SERVICE_KEY]; } public async wrapAttributes( newAttributes: SavedObjectAttributes, useRefType: boolean, - embeddable?: IEmbeddable + input?: ValType | RefType ): Promise> { + const originalInput = input ? input : {}; const savedObjectId = - embeddable && isSavedObjectEmbeddableInput(embeddable.getInput()) - ? (embeddable.getInput() as SavedObjectEmbeddableInput).savedObjectId + input && this.inputIsRefType(input) + ? (input as SavedObjectEmbeddableInput).savedObjectId : undefined; if (!useRefType) { - return { attributes: newAttributes } as ValType; - } else { - try { - if (savedObjectId) { - await this.savedObjectsClient.update(this.type, savedObjectId, newAttributes); - return { savedObjectId } as RefType; - } else { - const savedItem = await this.savedObjectsClient.create(this.type, newAttributes); - return { savedObjectId: savedItem.id } as RefType; - } - } catch (error) { - this.toasts.addDanger({ - title: i18n.translate('dashboard.attributeService.saveToLibraryError', { - defaultMessage: `Panel was not saved to the library. 
Error: {errorMessage}`, - values: { - errorMessage: error.message, - }, - }), - 'data-test-subj': 'saveDashboardFailure', - }); - return Promise.reject({ error }); + return { [ATTRIBUTE_SERVICE_KEY]: newAttributes } as ValType; + } + try { + if (this.options?.customSaveMethod) { + const savedItem = await this.options.customSaveMethod( + this.type, + newAttributes, + savedObjectId + ); + return { ...originalInput, savedObjectId: savedItem.id } as RefType; + } + + if (savedObjectId) { + await this.savedObjectsClient.update(this.type, savedObjectId, newAttributes); + return { ...originalInput, savedObjectId } as RefType; } + + const savedItem = await this.savedObjectsClient.create(this.type, newAttributes); + return { ...originalInput, savedObjectId: savedItem.id } as RefType; + } catch (error) { + this.toasts.addDanger({ + title: i18n.translate('dashboard.attributeService.saveToLibraryError', { + defaultMessage: `Panel was not saved to the library. Error: {errorMessage}`, + values: { + errorMessage: error.message, + }, + }), + 'data-test-subj': 'saveDashboardFailure', + }); + return Promise.reject({ error }); } } @@ -146,7 +175,7 @@ export class AttributeService< getInputAsRefType = async ( input: ValType | RefType, - saveOptions?: { showSaveModal: boolean } | { title: string } + saveOptions?: { showSaveModal: boolean; saveModalTitle?: string } | { title: string } ): Promise => { if (this.inputIsRefType(input)) { return input; @@ -159,7 +188,7 @@ export class AttributeService< copyOnSave: false, lastSavedTitle: '', getEsType: () => this.type, - getDisplayName: this.embeddableFactory.getDisplayName, + getDisplayName: this.embeddableFactory?.getDisplayName || (() => this.type), }, props.isTitleDuplicateConfirmed, props.onTitleDuplicate, @@ -169,7 +198,7 @@ export class AttributeService< } ); try { - const newAttributes = { ...input.attributes }; + const newAttributes = { ...input[ATTRIBUTE_SERVICE_KEY] }; newAttributes.title = props.newTitle; const wrappedInput = (await this.wrapAttributes(newAttributes, true)) as RefType; resolve(wrappedInput); @@ -181,11 +210,11 @@ export class AttributeService< }; if (saveOptions && (saveOptions as { showSaveModal: boolean }).showSaveModal) { - showSaveModal( + this.showSaveModal( reject()} - title={input.attributes.title} + title={get(saveOptions, 'saveModalTitle', input[ATTRIBUTE_SERVICE_KEY].title)} showCopyOnSave={false} objectType={this.type} showDescription={false} diff --git a/src/plugins/dashboard/public/attribute_service/attribute_service_mock.tsx b/src/plugins/dashboard/public/attribute_service/attribute_service_mock.tsx new file mode 100644 index 0000000000000..321a53361fc7a --- /dev/null +++ b/src/plugins/dashboard/public/attribute_service/attribute_service_mock.tsx @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
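A minimal test-style sketch of the by-value path of wrapAttributes/unwrapAttributes as reworked above, using the mockAttributeService helper introduced in the new attribute_service_mock.tsx whose diff begins just above (illustrative only; the 'testDocType' name and attribute shape are made up):

import { ATTRIBUTE_SERVICE_KEY } from './attribute_service';
import { mockAttributeService } from './attribute_service_mock';

test('wraps and unwraps attributes by value', async () => {
  const service = mockAttributeService<{ title: string }>('testDocType');
  // useRefType = false keeps the attributes inline under the 'attributes' key
  expect(await service.wrapAttributes({ title: 'hello' }, false)).toEqual({
    [ATTRIBUTE_SERVICE_KEY]: { title: 'hello' },
  });
  // unwrapping a by-value input simply returns those attributes
  expect(
    await service.unwrapAttributes({ id: '1', [ATTRIBUTE_SERVICE_KEY]: { title: 'hello' } })
  ).toEqual({ title: 'hello' });
});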
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { EmbeddableInput, SavedObjectEmbeddableInput } from '../embeddable_plugin'; +import { coreMock } from '../../../../core/public/mocks'; +import { AttributeServiceOptions } from './attribute_service'; +import { CoreStart } from '../../../../core/public'; +import { AttributeService, ATTRIBUTE_SERVICE_KEY } from '..'; + +export const mockAttributeService = < + A extends { title: string }, + V extends EmbeddableInput & { [ATTRIBUTE_SERVICE_KEY]: A } = EmbeddableInput & { + [ATTRIBUTE_SERVICE_KEY]: A; + }, + R extends SavedObjectEmbeddableInput = SavedObjectEmbeddableInput +>( + type: string, + options?: AttributeServiceOptions, + customCore?: jest.Mocked +): AttributeService => { + const core = customCore ? customCore : coreMock.createStart(); + const service = new AttributeService( + type, + jest.fn(), + core.savedObjects.client, + core.overlays, + core.i18n.Context, + core.notifications.toasts, + jest.fn().mockReturnValue(() => ({ getDisplayName: () => type })), + options + ); + return service; +}; diff --git a/src/plugins/dashboard/public/index.ts b/src/plugins/dashboard/public/index.ts index 8a9954cc77a2e..e22d1f038a456 100644 --- a/src/plugins/dashboard/public/index.ts +++ b/src/plugins/dashboard/public/index.ts @@ -40,7 +40,7 @@ export { export { addEmbeddableToDashboardUrl } from './url_utils/url_helper'; export { SavedObjectDashboard } from './saved_dashboards'; export { SavedDashboardPanel } from './types'; -export { AttributeService } from './attribute_service/attribute_service'; +export { AttributeService, ATTRIBUTE_SERVICE_KEY } from './attribute_service/attribute_service'; export function plugin(initializerContext: PluginInitializerContext) { return new DashboardPlugin(initializerContext); diff --git a/src/plugins/dashboard/public/plugin.tsx b/src/plugins/dashboard/public/plugin.tsx index 8b9b92faf9031..0ce6f9489ea02 100644 --- a/src/plugins/dashboard/public/plugin.tsx +++ b/src/plugins/dashboard/public/plugin.tsx @@ -40,6 +40,7 @@ import { EmbeddableStart, SavedObjectEmbeddableInput, EmbeddableInput, + PANEL_NOTIFICATION_TRIGGER, } from '../../embeddable/public'; import { DataPublicPluginSetup, DataPublicPluginStart, esFilters } from '../../data/public'; import { SharePluginSetup, SharePluginStart, UrlGeneratorContract } from '../../share/public'; @@ -51,6 +52,7 @@ import { getSavedObjectFinder, SavedObjectLoader, SavedObjectsStart, + showSaveModal, } from '../../saved_objects/public'; import { ExitFullScreenButton as ExitFullScreenButtonUi, @@ -83,6 +85,12 @@ import { ACTION_UNLINK_FROM_LIBRARY, UnlinkFromLibraryActionContext, UnlinkFromLibraryAction, + ACTION_ADD_TO_LIBRARY, + AddToLibraryActionContext, + AddToLibraryAction, + ACTION_LIBRARY_NOTIFICATION, + LibraryNotificationActionContext, + LibraryNotificationAction, } from './application'; import { createDashboardUrlGenerator, @@ -96,10 +104,9 @@ import { PlaceholderEmbeddableFactory } from './application/embeddable/placehold import { UrlGeneratorState } from '../../share/public'; import { AttributeService } from '.'; import { - AddToLibraryAction, - ACTION_ADD_TO_LIBRARY, - AddToLibraryActionContext, -} from './application/actions/add_to_library_action'; + AttributeServiceOptions, + ATTRIBUTE_SERVICE_KEY, +} from './attribute_service/attribute_service'; declare module '../../share/public' { export interface UrlGeneratorStateMapping { @@ -148,10 +155,13 @@ export interface DashboardStart { 
DashboardContainerByValueRenderer: ReturnType; getAttributeService: < A extends { title: string }, - V extends EmbeddableInput & { attributes: A }, - R extends SavedObjectEmbeddableInput + V extends EmbeddableInput & { [ATTRIBUTE_SERVICE_KEY]: A } = EmbeddableInput & { + [ATTRIBUTE_SERVICE_KEY]: A; + }, + R extends SavedObjectEmbeddableInput = SavedObjectEmbeddableInput >( - type: string + type: string, + options?: AttributeServiceOptions ) => AttributeService; } @@ -162,6 +172,7 @@ declare module '../../../plugins/ui_actions/public' { [ACTION_CLONE_PANEL]: ClonePanelActionContext; [ACTION_ADD_TO_LIBRARY]: AddToLibraryActionContext; [ACTION_UNLINK_FROM_LIBRARY]: UnlinkFromLibraryActionContext; + [ACTION_LIBRARY_NOTIFICATION]: LibraryNotificationActionContext; } } @@ -437,6 +448,10 @@ export class DashboardPlugin const unlinkFromLibraryAction = new UnlinkFromLibraryAction(); uiActions.registerAction(unlinkFromLibraryAction); uiActions.attachAction(CONTEXT_MENU_TRIGGER, unlinkFromLibraryAction.id); + + const libraryNotificationAction = new LibraryNotificationAction(); + uiActions.registerAction(libraryNotificationAction); + uiActions.attachAction(PANEL_NOTIFICATION_TRIGGER, libraryNotificationAction.id); } const savedDashboardLoader = createSavedDashboardLoader({ @@ -458,14 +473,16 @@ export class DashboardPlugin DashboardContainerByValueRenderer: createDashboardContainerByValueRenderer({ factory: dashboardContainerFactory, }), - getAttributeService: (type: string) => + getAttributeService: (type: string, options) => new AttributeService( type, + showSaveModal, core.savedObjects.client, core.overlays, core.i18n.Context, core.notifications.toasts, - embeddable.getEmbeddableFactory + embeddable.getEmbeddableFactory, + options ), }; } diff --git a/src/plugins/dashboard/public/saved_dashboards/saved_dashboards.ts b/src/plugins/dashboard/public/saved_dashboards/saved_dashboards.ts index 09357072a13a6..3bd4d66a693b1 100644 --- a/src/plugins/dashboard/public/saved_dashboards/saved_dashboards.ts +++ b/src/plugins/dashboard/public/saved_dashboards/saved_dashboards.ts @@ -35,5 +35,5 @@ interface Services { */ export function createSavedDashboardLoader(services: Services) { const SavedDashboard = createSavedDashboardClass(services); - return new SavedObjectLoader(SavedDashboard, services.savedObjectsClient, services.chrome); + return new SavedObjectLoader(SavedDashboard, services.savedObjectsClient); } diff --git a/src/plugins/data/common/constants.ts b/src/plugins/data/common/constants.ts index 22db1552e4303..43120583bd3a4 100644 --- a/src/plugins/data/common/constants.ts +++ b/src/plugins/data/common/constants.ts @@ -32,6 +32,7 @@ export const UI_SETTINGS = { COURIER_MAX_CONCURRENT_SHARD_REQUESTS: 'courier:maxConcurrentShardRequests', COURIER_BATCH_SEARCHES: 'courier:batchSearches', SEARCH_INCLUDE_FROZEN: 'search:includeFrozen', + SEARCH_TIMEOUT: 'search:timeout', HISTOGRAM_BAR_TARGET: 'histogram:barTarget', HISTOGRAM_MAX_BARS: 'histogram:maxBars', HISTORY_LIMIT: 'history:limit', diff --git a/src/plugins/data/common/es_query/filters/get_display_value.ts b/src/plugins/data/common/es_query/filters/get_display_value.ts index 10b4dab3f46ef..28ba0ab629e8f 100644 --- a/src/plugins/data/common/es_query/filters/get_display_value.ts +++ b/src/plugins/data/common/es_query/filters/get_display_value.ts @@ -17,30 +17,26 @@ * under the License. 
*/ -import { get } from 'lodash'; import { i18n } from '@kbn/i18n'; -import { IIndexPattern, IFieldType } from '../..'; +import { IIndexPattern } from '../..'; import { getIndexPatternFromFilter } from './get_index_pattern_from_filter'; import { Filter } from '../filters'; function getValueFormatter(indexPattern?: IIndexPattern, key?: string) { - if (!indexPattern || !key) return; + // checking getFormatterForField exists because there is at least one case where an index pattern + // is an object rather than an IndexPattern class + if (!indexPattern || !indexPattern.getFormatterForField || !key) return; - let format = get(indexPattern, ['fields', 'byName', key, 'format']); - if (!format && (indexPattern.fields as any).getByName) { - // TODO: Why is indexPatterns sometimes a map and sometimes an array? - const field: IFieldType = (indexPattern.fields as any).getByName(key); - if (!field) { - throw new Error( - i18n.translate('data.filter.filterBar.fieldNotFound', { - defaultMessage: 'Field {key} not found in index pattern {indexPattern}', - values: { key, indexPattern: indexPattern.title }, - }) - ); - } - format = field.format; + const field = indexPattern.fields.find((f) => f.name === key); + if (!field) { + throw new Error( + i18n.translate('data.filter.filterBar.fieldNotFound', { + defaultMessage: 'Field {key} not found in index pattern {indexPattern}', + values: { key, indexPattern: indexPattern.title }, + }) + ); } - return format; + return indexPattern.getFormatterForField(field); } export function getDisplayValueFromFilter(filter: Filter, indexPatterns: IIndexPattern[]): string { diff --git a/src/plugins/data/common/field_formats/converters/duration.test.ts b/src/plugins/data/common/field_formats/converters/duration.test.ts index d6205d54bd702..69163842f3498 100644 --- a/src/plugins/data/common/field_formats/converters/duration.test.ts +++ b/src/plugins/data/common/field_formats/converters/duration.test.ts @@ -24,11 +24,16 @@ describe('Duration Format', () => { inputFormat: 'seconds', outputFormat: 'humanize', outputPrecision: undefined, + showSuffix: undefined, fixtures: [ { input: -60, output: 'minus a minute', }, + { + input: 1, + output: 'a few seconds', + }, { input: 60, output: 'a minute', @@ -44,6 +49,7 @@ describe('Duration Format', () => { inputFormat: 'minutes', outputFormat: 'humanize', outputPrecision: undefined, + showSuffix: undefined, fixtures: [ { input: -60, @@ -64,6 +70,7 @@ describe('Duration Format', () => { inputFormat: 'minutes', outputFormat: 'asHours', outputPrecision: undefined, + showSuffix: undefined, fixtures: [ { input: -60, @@ -84,6 +91,7 @@ describe('Duration Format', () => { inputFormat: 'seconds', outputFormat: 'asSeconds', outputPrecision: 0, + showSuffix: undefined, fixtures: [ { input: -60, @@ -104,6 +112,7 @@ describe('Duration Format', () => { inputFormat: 'seconds', outputFormat: 'asSeconds', outputPrecision: 2, + showSuffix: undefined, fixtures: [ { input: -60, @@ -124,15 +133,34 @@ describe('Duration Format', () => { ], }); + testCase({ + inputFormat: 'seconds', + outputFormat: 'asSeconds', + outputPrecision: 0, + showSuffix: true, + fixtures: [ + { + input: -60, + output: '-60 Seconds', + }, + { + input: -32.333, + output: '-32 Seconds', + }, + ], + }); + function testCase({ inputFormat, outputFormat, outputPrecision, + showSuffix, fixtures, }: { inputFormat: string; outputFormat: string; outputPrecision: number | undefined; + showSuffix: boolean | undefined; fixtures: any[]; }) { fixtures.forEach((fixture: Record) => { @@ -143,7 +171,7 @@
describe('Duration Format', () => { outputPrecision ? `, ${outputPrecision} decimals` : '' }`, () => { const duration = new DurationFormat( - { inputFormat, outputFormat, outputPrecision }, + { inputFormat, outputFormat, outputPrecision, showSuffix }, jest.fn() ); expect(duration.convert(input)).toBe(output); diff --git a/src/plugins/data/common/field_formats/converters/duration.ts b/src/plugins/data/common/field_formats/converters/duration.ts index 53c2aba98120e..a3ce3d4dfd795 100644 --- a/src/plugins/data/common/field_formats/converters/duration.ts +++ b/src/plugins/data/common/field_formats/converters/duration.ts @@ -190,6 +190,7 @@ export class DurationFormat extends FieldFormat { const inputFormat = this.param('inputFormat'); const outputFormat = this.param('outputFormat') as keyof Duration; const outputPrecision = this.param('outputPrecision'); + const showSuffix = Boolean(this.param('showSuffix')); const human = this.isHuman(); const prefix = val < 0 && human @@ -200,6 +201,9 @@ export class DurationFormat extends FieldFormat { const duration = parseInputAsDuration(val, inputFormat) as Record; const formatted = duration[outputFormat](); const precise = human ? formatted : formatted.toFixed(outputPrecision); - return prefix + precise; + const type = outputFormats.find(({ method }) => method === outputFormat); + const suffix = showSuffix && type ? ` ${type.text}` : ''; + + return prefix + precise + suffix; }; } diff --git a/src/plugins/data/common/field_formats/converters/source.ts b/src/plugins/data/common/field_formats/converters/source.ts index 9c81bb011e127..e7dce82c725d2 100644 --- a/src/plugins/data/common/field_formats/converters/source.ts +++ b/src/plugins/data/common/field_formats/converters/source.ts @@ -60,7 +60,7 @@ export class SourceFormat extends FieldFormat { textConvert: TextContextTypeConvert = (value) => JSON.stringify(value); htmlConvert: HtmlContextTypeConvert = (value, options = {}) => { - const { field, hit } = options; + const { field, hit, indexPattern } = options; if (!field) { const converter = this.getConverterFor('text') as Function; @@ -69,7 +69,7 @@ export class SourceFormat extends FieldFormat { } const highlights = (hit && hit.highlight) || {}; - const formatted = field.indexPattern.formatHit(hit); + const formatted = indexPattern.formatHit(hit); const highlightPairs: any[] = []; const sourcePairs: any[] = []; const isShortDots = this.getConfig!(UI_SETTINGS.SHORT_DOTS_ENABLE); diff --git a/src/plugins/data/common/field_formats/field_formats_registry.ts b/src/plugins/data/common/field_formats/field_formats_registry.ts index 4b46adf399363..dbc3693c99779 100644 --- a/src/plugins/data/common/field_formats/field_formats_registry.ts +++ b/src/plugins/data/common/field_formats/field_formats_registry.ts @@ -181,11 +181,11 @@ export class FieldFormatsRegistry { * @param {ES_FIELD_TYPES[]} esTypes * @return {FieldFormat} */ - getDefaultInstancePlain( + getDefaultInstancePlain = ( fieldType: KBN_FIELD_TYPES, esTypes?: ES_FIELD_TYPES[], params: Record = {} - ): FieldFormat { + ): FieldFormat => { const conf = this.getDefaultConfig(fieldType, esTypes); const instanceParams = { ...conf.params, @@ -193,7 +193,7 @@ export class FieldFormatsRegistry { }; return this.getInstance(conf.id, instanceParams); - } + }; /** * Returns a cache key built by the given variables for caching in memoized * Where esType contains fieldType, fieldType is returned diff --git a/src/plugins/data/common/field_formats/types.ts b/src/plugins/data/common/field_formats/types.ts index 
daa44b2b0f85b..af956a20c0dc5 100644 --- a/src/plugins/data/common/field_formats/types.ts +++ b/src/plugins/data/common/field_formats/types.ts @@ -27,6 +27,7 @@ export type FieldFormatsContentType = 'html' | 'text'; /** @internal **/ export interface HtmlContextTypeOptions { field?: any; + indexPattern?: any; hit?: Record; } diff --git a/src/plugins/data/common/index_patterns/errors.ts b/src/plugins/data/common/index_patterns/errors.ts new file mode 100644 index 0000000000000..3d92bae1968fb --- /dev/null +++ b/src/plugins/data/common/index_patterns/errors.ts @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { FieldSpec } from './types'; + +export class FieldTypeUnknownError extends Error { + public readonly fieldSpec: FieldSpec; + constructor(message: string, spec: FieldSpec) { + super(message); + this.name = 'FieldTypeUnknownError'; + this.fieldSpec = spec; + } +} diff --git a/src/plugins/data/common/index_patterns/fields/__snapshots__/index_pattern_field.test.ts.snap b/src/plugins/data/common/index_patterns/fields/__snapshots__/index_pattern_field.test.ts.snap index e61593f6bfb27..4279dd320ad62 100644 --- a/src/plugins/data/common/index_patterns/fields/__snapshots__/index_pattern_field.test.ts.snap +++ b/src/plugins/data/common/index_patterns/fields/__snapshots__/index_pattern_field.test.ts.snap @@ -14,7 +14,7 @@ Object { }, "count": 1, "esTypes": Array [ - "type", + "text", ], "lang": "lang", "name": "name", @@ -30,7 +30,7 @@ Object { "path": "path", }, }, - "type": "type", + "type": "string", } `; @@ -48,7 +48,7 @@ Object { }, "count": 1, "esTypes": Array [ - "type", + "text", ], "format": Object { "id": "number", @@ -70,6 +70,6 @@ Object { "path": "path", }, }, - "type": "type", + "type": "string", } `; diff --git a/src/plugins/data/common/index_patterns/fields/field_list.ts b/src/plugins/data/common/index_patterns/fields/field_list.ts index d2489a5d1f7e3..4cf6075869851 100644 --- a/src/plugins/data/common/index_patterns/fields/field_list.ts +++ b/src/plugins/data/common/index_patterns/fields/field_list.ts @@ -35,6 +35,7 @@ export interface IIndexPatternFieldList extends Array { removeAll(): void; replaceAll(specs: FieldSpec[]): void; update(field: FieldSpec): void; + toSpec(options?: { getFormatterForField?: IndexPattern['getFormatterForField'] }): FieldSpec[]; } export type CreateIndexPatternFieldList = ( @@ -44,87 +45,79 @@ export type CreateIndexPatternFieldList = ( onNotification?: OnNotification ) => IIndexPatternFieldList; -export class FieldList extends Array implements IIndexPatternFieldList { - private byName: FieldMap = new Map(); - private groups: Map = new Map(); - private indexPattern: IndexPattern; - private shortDotsEnable: boolean; - private onNotification: OnNotification; - private setByName = 
(field: IndexPatternField) => this.byName.set(field.name, field); - private setByGroup = (field: IndexPatternField) => { - if (typeof this.groups.get(field.type) === 'undefined') { - this.groups.set(field.type, new Map()); +// extending the array class and using a constructor doesn't work well +// when calling filter and similar so wrapping in a callback. +// to be removed in the future +export const fieldList = ( + specs: FieldSpec[] = [], + shortDotsEnable = false +): IIndexPatternFieldList => { + class FldList extends Array implements IIndexPatternFieldList { + private byName: FieldMap = new Map(); + private groups: Map = new Map(); + private setByName = (field: IndexPatternField) => this.byName.set(field.name, field); + private setByGroup = (field: IndexPatternField) => { + if (typeof this.groups.get(field.type) === 'undefined') { + this.groups.set(field.type, new Map()); + } + this.groups.get(field.type)!.set(field.name, field); + }; + private removeByGroup = (field: IFieldType) => this.groups.get(field.type)!.delete(field.name); + private calcDisplayName = (name: string) => + shortDotsEnable ? shortenDottedString(name) : name; + constructor() { + super(); + specs.map((field) => this.add(field)); } - this.groups.get(field.type)!.set(field.name, field); - }; - private removeByGroup = (field: IFieldType) => this.groups.get(field.type)!.delete(field.name); - private calcDisplayName = (name: string) => - this.shortDotsEnable ? shortenDottedString(name) : name; - constructor( - indexPattern: IndexPattern, - specs: FieldSpec[] = [], - shortDotsEnable = false, - onNotification: OnNotification = () => {} - ) { - super(); - this.indexPattern = indexPattern; - this.shortDotsEnable = shortDotsEnable; - this.onNotification = onNotification; - specs.map((field) => this.add(field)); - } + public readonly getAll = () => [...this.byName.values()]; + public readonly getByName = (name: IndexPatternField['name']) => this.byName.get(name); + public readonly getByType = (type: IndexPatternField['type']) => [ + ...(this.groups.get(type) || new Map()).values(), + ]; + public readonly add = (field: FieldSpec) => { + const newField = new IndexPatternField(field, this.calcDisplayName(field.name)); + this.push(newField); + this.setByName(newField); + this.setByGroup(newField); + }; - public readonly getAll = () => [...this.byName.values()]; - public readonly getByName = (name: IndexPatternField['name']) => this.byName.get(name); - public readonly getByType = (type: IndexPatternField['type']) => [ - ...(this.groups.get(type) || new Map()).values(), - ]; - public readonly add = (field: FieldSpec) => { - const newField = new IndexPatternField( - this.indexPattern, - field, - this.calcDisplayName(field.name), - this.onNotification - ); - this.push(newField); - this.setByName(newField); - this.setByGroup(newField); - }; + public readonly remove = (field: IFieldType) => { + this.removeByGroup(field); + this.byName.delete(field.name); - public readonly remove = (field: IFieldType) => { - this.removeByGroup(field); - this.byName.delete(field.name); + const fieldIndex = findIndex(this, { name: field.name }); + this.splice(fieldIndex, 1); + }; - const fieldIndex = findIndex(this, { name: field.name }); - this.splice(fieldIndex, 1); - }; + public readonly update = (field: FieldSpec) => { + const newField = new IndexPatternField(field, this.calcDisplayName(field.name)); + const index = this.findIndex((f) => f.name === newField.name); + this.splice(index, 1, newField); + this.setByName(newField); + 
this.removeByGroup(newField); + this.setByGroup(newField); + }; - public readonly update = (field: FieldSpec) => { - const newField = new IndexPatternField( - this.indexPattern, - field, - this.calcDisplayName(field.name), - this.onNotification - ); - const index = this.findIndex((f) => f.name === newField.name); - this.splice(index, 1, newField); - this.setByName(newField); - this.removeByGroup(newField); - this.setByGroup(newField); - }; + public readonly removeAll = () => { + this.length = 0; + this.byName.clear(); + this.groups.clear(); + }; - public readonly removeAll = () => { - this.length = 0; - this.byName.clear(); - this.groups.clear(); - }; + public readonly replaceAll = (spcs: FieldSpec[]) => { + this.removeAll(); + spcs.forEach(this.add); + }; - public readonly replaceAll = (specs: FieldSpec[]) => { - this.removeAll(); - specs.forEach(this.add); - }; + public toSpec({ + getFormatterForField, + }: { + getFormatterForField?: IndexPattern['getFormatterForField']; + } = {}) { + return [...this.map((field) => field.toSpec({ getFormatterForField }))]; + } + } - public readonly toSpec = () => { - return [...this.map((field) => field.toSpec())]; - }; -} + return new FldList(); +}; diff --git a/src/plugins/data/common/index_patterns/fields/index_pattern_field.test.ts b/src/plugins/data/common/index_patterns/fields/index_pattern_field.test.ts index 0cd0fe8324809..3c4fac81c2c7c 100644 --- a/src/plugins/data/common/index_patterns/fields/index_pattern_field.test.ts +++ b/src/plugins/data/common/index_patterns/fields/index_pattern_field.test.ts @@ -19,7 +19,7 @@ import { IndexPatternField } from './index_pattern_field'; import { IndexPattern } from '../index_patterns'; -import { KBN_FIELD_TYPES } from '../../../common'; +import { KBN_FIELD_TYPES, FieldFormat } from '../../../common'; import { FieldSpec } from '../types'; describe('Field', function () { @@ -28,21 +28,16 @@ describe('Field', function () { } function getField(values = {}) { - return new IndexPatternField( - fieldValues.indexPattern as IndexPattern, - { ...fieldValues, ...values }, - 'displayName', - () => {} - ); + return new IndexPatternField({ ...fieldValues, ...values }, 'displayName'); } const fieldValues = { name: 'name', - type: 'type', + type: 'string', script: 'script', lang: 'lang', count: 1, - esTypes: ['type'], + esTypes: ['text'], aggregatable: true, filterable: true, searchable: true, @@ -125,7 +120,7 @@ describe('Field', function () { const fieldB = getField({ indexed: true, type: KBN_FIELD_TYPES.STRING }); expect(fieldB.sortable).toEqual(true); - const fieldC = getField({ indexed: false }); + const fieldC = getField({ indexed: false, aggregatable: false, scripted: false }); expect(fieldC.sortable).toEqual(false); }); @@ -139,31 +134,26 @@ describe('Field', function () { const fieldC = getField({ indexed: true, type: KBN_FIELD_TYPES.STRING }); expect(fieldC.filterable).toEqual(true); - const fieldD = getField({ scripted: false, indexed: false }); + const fieldD = getField({ scripted: false, indexed: false, searchable: false }); expect(fieldD.filterable).toEqual(false); }); it('exports the property to JSON', () => { - const field = new IndexPatternField( - { fieldFormatMap: { name: {} } } as IndexPattern, - fieldValues, - 'displayName', - () => {} - ); + const field = new IndexPatternField(fieldValues, 'displayName'); expect(flatten(field)).toMatchSnapshot(); }); it('spec snapshot', () => { - const field = new IndexPatternField( - { - fieldFormatMap: { - name: { toJSON: () => ({ id: 'number', params: { pattern: 
'$0,0.[00]' } }) }, - }, - } as IndexPattern, - fieldValues, - 'displayName', - () => {} - ); - expect(field.toSpec()).toMatchSnapshot(); + const field = new IndexPatternField(fieldValues, 'displayName'); + const getFormatterForField = () => + ({ + toJSON: () => ({ + id: 'number', + params: { + pattern: '$0,0.[00]', + }, + }), + } as FieldFormat); + expect(field.toSpec({ getFormatterForField })).toMatchSnapshot(); }); }); diff --git a/src/plugins/data/common/index_patterns/fields/index_pattern_field.ts b/src/plugins/data/common/index_patterns/fields/index_pattern_field.ts index 965f1a7f63065..7f72bfe55c7cd 100644 --- a/src/plugins/data/common/index_patterns/fields/index_pattern_field.ts +++ b/src/plugins/data/common/index_patterns/fields/index_pattern_field.ts @@ -20,40 +20,27 @@ import { i18n } from '@kbn/i18n'; import { KbnFieldType, getKbnFieldType } from '../../kbn_field_types'; import { KBN_FIELD_TYPES } from '../../kbn_field_types/types'; -import { FieldFormat } from '../../field_formats'; import { IFieldType } from './types'; -import { OnNotification, FieldSpec } from '../types'; - -import { IndexPattern } from '../index_patterns'; +import { FieldSpec, IndexPattern } from '../..'; +import { FieldTypeUnknownError } from '../errors'; export class IndexPatternField implements IFieldType { readonly spec: FieldSpec; // not writable or serialized - readonly indexPattern: IndexPattern; readonly displayName: string; private readonly kbnFieldType: KbnFieldType; - constructor( - indexPattern: IndexPattern, - spec: FieldSpec, - displayName: string, - onNotification: OnNotification - ) { - this.indexPattern = indexPattern; + constructor(spec: FieldSpec, displayName: string) { this.spec = { ...spec, type: spec.name === '_source' ? '_source' : spec.type }; this.displayName = displayName; this.kbnFieldType = getKbnFieldType(spec.type); if (spec.type && this.kbnFieldType?.name === KBN_FIELD_TYPES.UNKNOWN) { - const title = i18n.translate('data.indexPatterns.unknownFieldHeader', { - values: { type: spec.type }, - defaultMessage: 'Unknown field type {type}', - }); - const text = i18n.translate('data.indexPatterns.unknownFieldErrorMessage', { - values: { name: spec.name, title: indexPattern.title }, - defaultMessage: 'Field {name} in indexPattern {title} is using an unknown field type.', + const msg = i18n.translate('data.indexPatterns.unknownFieldTypeErrorMsg', { + values: { type: spec.type, name: spec.name }, + defaultMessage: `Field '{name}' Unknown field type '{type}'`, }); - onNotification({ title, text, color: 'danger', iconType: 'alert' }); + throw new FieldTypeUnknownError(msg, spec); } } @@ -143,10 +130,6 @@ export class IndexPatternField implements IFieldType { return this.aggregatable; } - public get format(): FieldFormat { - return this.indexPattern.getFormatterForField(this); - } - public toJSON() { return { count: this.count, @@ -165,7 +148,11 @@ export class IndexPatternField implements IFieldType { }; } - public toSpec() { + public toSpec({ + getFormatterForField, + }: { + getFormatterForField?: IndexPattern['getFormatterForField']; + } = {}) { return { count: this.count, script: this.script, @@ -179,7 +166,7 @@ export class IndexPatternField implements IFieldType { aggregatable: this.aggregatable, readFromDocValues: this.readFromDocValues, subType: this.subType, - format: this.indexPattern?.fieldFormatMap[this.name]?.toJSON() || undefined, + format: getFormatterForField ? 
getFormatterForField(this).toJSON() : undefined, }; } } diff --git a/src/plugins/data/common/index_patterns/fields/types.ts b/src/plugins/data/common/index_patterns/fields/types.ts index 558b5b57dce40..5814760601a67 100644 --- a/src/plugins/data/common/index_patterns/fields/types.ts +++ b/src/plugins/data/common/index_patterns/fields/types.ts @@ -17,7 +17,7 @@ * under the License. */ -import { FieldSpec, IFieldSubType } from '../types'; +import { FieldSpec, IFieldSubType, IndexPattern } from '../..'; export interface IFieldType { name: string; @@ -38,5 +38,5 @@ export interface IFieldType { subType?: IFieldSubType; displayName?: string; format?: any; - toSpec?: () => FieldSpec; + toSpec?: (options?: { getFormatterForField?: IndexPattern['getFormatterForField'] }) => FieldSpec; } diff --git a/src/plugins/data/common/index_patterns/index.ts b/src/plugins/data/common/index_patterns/index.ts index 51a642b775c29..08f478404be2c 100644 --- a/src/plugins/data/common/index_patterns/index.ts +++ b/src/plugins/data/common/index_patterns/index.ts @@ -20,3 +20,5 @@ export * from './fields'; export * from './types'; export { IndexPatternsService } from './index_patterns'; +export type { IndexPattern } from './index_patterns'; +export * from './errors'; diff --git a/src/plugins/data/common/index_patterns/index_patterns/__snapshots__/index_pattern.test.ts.snap b/src/plugins/data/common/index_patterns/index_patterns/__snapshots__/index_pattern.test.ts.snap index 047ac836a87d1..1871627da76de 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/__snapshots__/index_pattern.test.ts.snap +++ b/src/plugins/data/common/index_patterns/index_patterns/__snapshots__/index_pattern.test.ts.snap @@ -32,7 +32,12 @@ Object { "esTypes": Array [ "boolean", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "ssl", "readFromDocValues": true, @@ -49,7 +54,12 @@ Object { "esTypes": Array [ "date", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "@timestamp", "readFromDocValues": true, @@ -66,7 +76,12 @@ Object { "esTypes": Array [ "date", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "time", "readFromDocValues": true, @@ -83,7 +98,12 @@ Object { "esTypes": Array [ "keyword", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "@tags", "readFromDocValues": true, @@ -100,7 +120,12 @@ Object { "esTypes": Array [ "date", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "utc_time", "readFromDocValues": true, @@ -117,7 +142,12 @@ Object { "esTypes": Array [ "integer", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "phpmemory", "readFromDocValues": true, @@ -134,7 +164,12 @@ Object { "esTypes": Array [ "ip", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "ip", "readFromDocValues": true, @@ -151,7 +186,12 @@ Object { "esTypes": Array [ "attachment", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object 
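A sketch of the new per-field serialization path, mirroring the updated unit test: the formatter is injected into `toSpec()` instead of being read from the removed `field.format` getter. The spec literal and the stand-in formatter below are illustrative.

import { IndexPatternField } from 'src/plugins/data/common';
import type { FieldFormat, FieldSpec } from 'src/plugins/data/common';

const field = new IndexPatternField(
  {
    name: 'bytes',
    type: 'number',
    esTypes: ['long'],
    aggregatable: true,
    searchable: true,
    count: 0,
    scripted: false,
    readFromDocValues: true,
  } as FieldSpec,
  'bytes' // the display name is now the only other constructor argument
);

// Stand-in formatter; toSpec() only calls toJSON() on it.
const getFormatterForField = () =>
  (({ toJSON: () => ({ id: 'bytes' }) } as unknown) as FieldFormat);

field.toSpec();                         // format: undefined
field.toSpec({ getFormatterForField }); // format: { id: 'bytes' }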
{ + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "request_body", "readFromDocValues": true, @@ -168,7 +208,12 @@ Object { "esTypes": Array [ "geo_point", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "point", "readFromDocValues": true, @@ -185,7 +230,12 @@ Object { "esTypes": Array [ "geo_shape", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "area", "readFromDocValues": false, @@ -202,7 +252,12 @@ Object { "esTypes": Array [ "murmur3", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "hashed", "readFromDocValues": false, @@ -219,7 +274,12 @@ Object { "esTypes": Array [ "geo_point", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "geo.coordinates", "readFromDocValues": true, @@ -236,7 +296,12 @@ Object { "esTypes": Array [ "text", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "extension", "readFromDocValues": false, @@ -253,7 +318,12 @@ Object { "esTypes": Array [ "keyword", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "extension.keyword", "readFromDocValues": true, @@ -274,7 +344,12 @@ Object { "esTypes": Array [ "text", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "machine.os", "readFromDocValues": false, @@ -291,7 +366,12 @@ Object { "esTypes": Array [ "keyword", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "machine.os.raw", "readFromDocValues": true, @@ -312,7 +392,12 @@ Object { "esTypes": Array [ "keyword", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "geo.src", "readFromDocValues": true, @@ -329,7 +414,12 @@ Object { "esTypes": Array [ "_id", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "_id", "readFromDocValues": false, @@ -346,7 +436,12 @@ Object { "esTypes": Array [ "_type", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "_type", "readFromDocValues": false, @@ -363,7 +458,12 @@ Object { "esTypes": Array [ "_source", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "_source", "readFromDocValues": false, @@ -380,7 +480,12 @@ Object { "esTypes": Array [ "text", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "non-filterable", "readFromDocValues": false, @@ -397,7 +502,12 @@ Object { "esTypes": Array [ "text", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": 
"non-sortable", "readFromDocValues": false, @@ -414,7 +524,12 @@ Object { "esTypes": Array [ "conflict", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": undefined, "name": "custom_user_field", "readFromDocValues": true, @@ -431,7 +546,12 @@ Object { "esTypes": Array [ "text", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": "expression", "name": "script string", "readFromDocValues": false, @@ -448,7 +568,12 @@ Object { "esTypes": Array [ "long", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": "expression", "name": "script number", "readFromDocValues": false, @@ -465,7 +590,12 @@ Object { "esTypes": Array [ "date", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": "painless", "name": "script date", "readFromDocValues": false, @@ -482,7 +612,12 @@ Object { "esTypes": Array [ "murmur3", ], - "format": undefined, + "format": Object { + "id": "number", + "params": Object { + "pattern": "$0,0.[00]", + }, + }, "lang": "expression", "name": "script murmur3", "readFromDocValues": false, @@ -496,7 +631,7 @@ Object { "id": "test-pattern", "sourceFilters": undefined, "timeFieldName": "timestamp", - "title": "test-pattern", + "title": "title", "typeMeta": undefined, "version": 2, } diff --git a/src/plugins/data/common/index_patterns/index_patterns/format_hit.ts b/src/plugins/data/common/index_patterns/index_patterns/format_hit.ts index a0597ed4b9026..b47fef107258a 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/format_hit.ts +++ b/src/plugins/data/common/index_patterns/index_patterns/format_hit.ts @@ -34,9 +34,9 @@ export function formatHitProvider(indexPattern: IndexPattern, defaultFormat: any type: FieldFormatsContentType = 'html' ) { const field = indexPattern.fields.getByName(fieldName); - const format = field ? field.format : defaultFormat; + const format = field ? indexPattern.getFormatterForField(field) : defaultFormat; - return format.convert(val, type, { field, hit }); + return format.convert(val, type, { field, hit, indexPattern }); } function formatHit(hit: Record, type: string = 'html') { diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_pattern.test.ts b/src/plugins/data/common/index_patterns/index_patterns/index_pattern.test.ts index f7e1156170f03..f49897c47d562 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/index_pattern.test.ts +++ b/src/plugins/data/common/index_patterns/index_patterns/index_pattern.test.ts @@ -17,7 +17,7 @@ * under the License. 
*/ -import { defaults, map, last, get } from 'lodash'; +import { defaults, map, last } from 'lodash'; import { IndexPattern } from './index_pattern'; @@ -29,6 +29,7 @@ import { stubbedSavedObjectIndexPattern } from '../../../../../fixtures/stubbed_ import { IndexPatternField } from '../fields'; import { fieldFormatsMock } from '../../field_formats/mocks'; +import { FieldFormat, IndexPatternsService } from '../..'; class MockFieldFormatter {} @@ -115,6 +116,7 @@ function create(id: string, payload?: any): Promise { apiClient, patternCache, fieldFormats: fieldFormatsMock, + indexPatternsService: {} as IndexPatternsService, onNotification: () => {}, onError: () => {}, shortDotsEnable: false, @@ -150,9 +152,6 @@ describe('IndexPattern', () => { expect(indexPattern).toHaveProperty('getNonScriptedFields'); expect(indexPattern).toHaveProperty('addScriptedField'); expect(indexPattern).toHaveProperty('removeScriptedField'); - expect(indexPattern).toHaveProperty('toString'); - expect(indexPattern).toHaveProperty('toJSON'); - expect(indexPattern).toHaveProperty('save'); // properties expect(indexPattern).toHaveProperty('fields'); @@ -170,7 +169,6 @@ describe('IndexPattern', () => { test('should have expected properties on fields', function () { expect(indexPattern.fields[0]).toHaveProperty('displayName'); expect(indexPattern.fields[0]).toHaveProperty('filterable'); - expect(indexPattern.fields[0]).toHaveProperty('format'); expect(indexPattern.fields[0]).toHaveProperty('sortable'); expect(indexPattern.fields[0]).toHaveProperty('scripted'); }); @@ -319,16 +317,18 @@ describe('IndexPattern', () => { describe('toSpec', () => { test('should match snapshot', () => { - indexPattern.fieldFormatMap.bytes = { + const formatter = { toJSON: () => ({ id: 'number', params: { pattern: '$0,0.[00]' } }), - }; + } as FieldFormat; + indexPattern.getFormatterForField = () => formatter; expect(indexPattern.toSpec()).toMatchSnapshot(); }); test('can restore from spec', async () => { - indexPattern.fieldFormatMap.bytes = { + const formatter = { toJSON: () => ({ id: 'number', params: { pattern: '$0,0.[00]' } }), - }; + } as FieldFormat; + indexPattern.getFormatterForField = () => formatter; const spec = indexPattern.toSpec(); const restoredPattern = await create(spec.id as string); restoredPattern.initFromSpec(spec); @@ -389,60 +389,4 @@ describe('IndexPattern', () => { }); }); }); - - test('should handle version conflicts', async () => { - setDocsourcePayload(null, { - id: 'foo', - version: 'foo', - attributes: { - title: 'something', - }, - }); - // Create a normal index pattern - const pattern = new IndexPattern('foo', { - savedObjectsClient: savedObjectsClient as any, - apiClient, - patternCache, - fieldFormats: fieldFormatsMock, - onNotification: () => {}, - onError: () => {}, - shortDotsEnable: false, - metaFields: [], - }); - await pattern.init(); - - expect(get(pattern, 'version')).toBe('fooa'); - - // Create the same one - we're going to handle concurrency - const samePattern = new IndexPattern('foo', { - savedObjectsClient: savedObjectsClient as any, - apiClient, - patternCache, - fieldFormats: fieldFormatsMock, - onNotification: () => {}, - onError: () => {}, - shortDotsEnable: false, - metaFields: [], - }); - await samePattern.init(); - - expect(get(samePattern, 'version')).toBe('fooaa'); - - // This will conflict because samePattern did a save (from refreshFields) - // but the resave should work fine - pattern.title = 'foo2'; - await pattern.save(); - - // This should not be able to recover - samePattern.title 
= 'foo3'; - - let result; - try { - await samePattern.save(); - } catch (err) { - result = err; - } - - expect(result.res.status).toBe(409); - }); }); diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts b/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts index ea91a9bb14e1f..76f1a5e59d0ee 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts +++ b/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts @@ -26,11 +26,12 @@ import { ES_FIELD_TYPES, KBN_FIELD_TYPES, IIndexPattern, + FieldTypeUnknownError, FieldFormatNotFoundError, } from '../../../common'; import { findByTitle } from '../utils'; import { IndexPatternMissingIndices } from '../lib'; -import { IndexPatternField, IIndexPatternFieldList, FieldList } from '../fields'; +import { IndexPatternField, IIndexPatternFieldList, fieldList } from '../fields'; import { createFieldsFetcher } from './_fields_fetcher'; import { formatHitProvider } from './format_hit'; import { flattenHitWrapper } from './flatten_hit'; @@ -40,8 +41,8 @@ import { PatternCache } from './_pattern_cache'; import { expandShorthand, FieldMappingSpec, MappingObject } from '../../field_mapping'; import { IndexPatternSpec, TypeMeta, FieldSpec, SourceFilter } from '../types'; import { SerializedFieldFormat } from '../../../../expressions/common'; +import { IndexPatternsService } from '..'; -const MAX_ATTEMPTS_TO_RESOLVE_CONFLICTS = 3; const savedObjectType = 'index-pattern'; interface IndexPatternDeps { @@ -49,6 +50,7 @@ interface IndexPatternDeps { apiClient: IIndexPatternsApiClient; patternCache: PatternCache; fieldFormats: FieldFormatsStartCommon; + indexPatternsService: IndexPatternsService; onNotification: OnNotification; onError: OnError; shortDotsEnable: boolean; @@ -69,17 +71,18 @@ export class IndexPattern implements IIndexPattern { public flattenHit: any; public metaFields: string[]; - private version: string | undefined; + public version: string | undefined; private savedObjectsClient: SavedObjectsClientCommon; private patternCache: PatternCache; public sourceFilters?: SourceFilter[]; - private originalBody: { [key: string]: any } = {}; + // todo make read only, update via method or factor out + public originalBody: { [key: string]: any } = {}; public fieldsFetcher: any; // probably want to factor out any direct usage and change to private + private indexPatternsService: IndexPatternsService; private shortDotsEnable: boolean = false; private fieldFormats: FieldFormatsStartCommon; private onNotification: OnNotification; private onError: OnError; - private apiClient: IIndexPatternsApiClient; private mapping: MappingObject = expandShorthand({ title: ES_FIELD_TYPES.TEXT, @@ -110,6 +113,7 @@ export class IndexPattern implements IIndexPattern { apiClient, patternCache, fieldFormats, + indexPatternsService, onNotification, onError, shortDotsEnable = false, @@ -120,15 +124,14 @@ export class IndexPattern implements IIndexPattern { this.savedObjectsClient = savedObjectsClient; this.patternCache = patternCache; this.fieldFormats = fieldFormats; + this.indexPatternsService = indexPatternsService; this.onNotification = onNotification; this.onError = onError; this.shortDotsEnable = shortDotsEnable; this.metaFields = metaFields; + this.fields = fieldList([], this.shortDotsEnable); - this.fields = new FieldList(this, [], this.shortDotsEnable, this.onNotification); - - this.apiClient = apiClient; this.fieldsFetcher = createFieldsFetcher(this, apiClient, metaFields); this.flattenHit 
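`IndexPatternDeps` gains an `indexPatternsService` entry, so any code that constructs an `IndexPattern` directly, as the test's `create()` helper does, now has to provide it. A trimmed sketch for an async test body; the `*Mock` values and the id are placeholders standing in for real dependencies.

import { IndexPattern } from 'src/plugins/data/common/index_patterns/index_patterns/index_pattern';
import type { IndexPatternsService } from 'src/plugins/data/common';

const pattern = new IndexPattern('my-pattern-id', {
  savedObjectsClient: savedObjectsClientMock as any,
  apiClient: apiClientMock,
  patternCache: patternCacheMock,
  fieldFormats: fieldFormatsMock,
  indexPatternsService: {} as IndexPatternsService, // new required dependency
  onNotification: () => {},
  onError: () => {},
  shortDotsEnable: false,
  metaFields: [],
});
await pattern.init();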
= flattenHitWrapper(this, metaFields); this.formatHit = formatHitProvider( @@ -138,6 +141,22 @@ export class IndexPattern implements IIndexPattern { this.formatField = this.formatHit.formatField; } + private unknownFieldErrorNotification( + fieldType: string, + fieldName: string, + indexPatternTitle: string + ) { + const title = i18n.translate('data.indexPatterns.unknownFieldHeader', { + values: { type: fieldType }, + defaultMessage: 'Unknown field type {type}', + }); + const text = i18n.translate('data.indexPatterns.unknownFieldErrorMessage', { + values: { name: fieldName, title: indexPatternTitle }, + defaultMessage: 'Field {name} in indexPattern {title} is using an unknown field type.', + }); + this.onNotification({ title, text, color: 'danger', iconType: 'alert' }); + } + private serializeFieldFormatMap(flat: any, format: string, field: string | undefined) { if (format && field) { flat[field] = format; @@ -181,7 +200,15 @@ export class IndexPattern implements IIndexPattern { await this.refreshFields(); } else { if (specs) { - this.fields.replaceAll(specs); + try { + this.fields.replaceAll(specs); + } catch (err) { + if (err instanceof FieldTypeUnknownError) { + this.unknownFieldErrorNotification(err.fieldSpec.name, err.fieldSpec.type, this.title); + } else { + throw err; + } + } } } } @@ -203,7 +230,15 @@ export class IndexPattern implements IIndexPattern { this.timeFieldName = spec.timeFieldName; this.sourceFilters = spec.sourceFilters; - this.fields.replaceAll(spec.fields || []); + try { + this.fields.replaceAll(spec.fields || []); + } catch (err) { + if (err instanceof FieldTypeUnknownError) { + this.unknownFieldErrorNotification(err.fieldSpec.name, err.fieldSpec.type, this.title); + } else { + throw err; + } + } this.typeMeta = spec.typeMeta; this.fieldFormatMap = _.mapValues(fieldFormatMap, (mapping) => { @@ -322,7 +357,7 @@ export class IndexPattern implements IIndexPattern { title: this.title, timeFieldName: this.timeFieldName, sourceFilters: this.sourceFilters, - fields: this.fields.toSpec(), + fields: this.fields.toSpec({ getFormatterForField: this.getFormatterForField.bind(this) }), typeMeta: this.typeMeta, }; } @@ -342,19 +377,25 @@ export class IndexPattern implements IIndexPattern { throw new DuplicateField(name); } - this.fields.add({ - name, - script, - type: fieldType, - scripted: true, - lang, - aggregatable: true, - searchable: true, - count: 0, - readFromDocValues: false, - }); - - await this.save(); + try { + this.fields.add({ + name, + script, + type: fieldType, + scripted: true, + lang, + aggregatable: true, + searchable: true, + count: 0, + readFromDocValues: false, + }); + } catch (err) { + if (err instanceof FieldTypeUnknownError) { + this.unknownFieldErrorNotification(err.fieldSpec.name, err.fieldSpec.type, this.title); + } else { + throw err; + } + } } removeScriptedField(fieldName: string) { @@ -362,7 +403,6 @@ export class IndexPattern implements IIndexPattern { if (field) { this.fields.remove(field); } - return this.save(); } async popularizeField(fieldName: string, unit = 1) { @@ -441,7 +481,7 @@ export class IndexPattern implements IIndexPattern { fields: this.mapping.fields._serialize!(this.fields), fieldFormatMap: this.mapping.fieldFormatMap._serialize!(this.fieldFormatMap), type: this.type, - typeMeta: this.mapping.typeMeta._serialize!(this.mapping), + typeMeta: this.mapping.typeMeta._serialize!(this.typeMeta), }; } @@ -483,131 +523,52 @@ export class IndexPattern implements IIndexPattern { return await _create(potentialDuplicateByTitle.id); } - async 
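Field mutations now throw `FieldTypeUnknownError` rather than notifying directly, and `IndexPattern` converts the error into a notification via the `unknownFieldErrorNotification` helper added above. The standalone sketch below shows that catch shape; `notify` is a placeholder, and since the helper is declared as `(fieldType, fieldName, indexPatternTitle)`, the spec's `type` should be passed before its `name` when forwarding.

import { FieldTypeUnknownError } from 'src/plugins/data/common';
import type { FieldSpec } from 'src/plugins/data/common';

declare function notify(title: string, text: string): void; // placeholder

function replaceFieldsSafely(
  fields: { replaceAll: (specs: FieldSpec[]) => void },
  specs: FieldSpec[],
  indexPatternTitle: string
) {
  try {
    fields.replaceAll(specs);
  } catch (err) {
    if (err instanceof FieldTypeUnknownError) {
      // err.fieldSpec carries the offending spec.
      notify(
        `Unknown field type ${err.fieldSpec.type}`,
        `Field ${err.fieldSpec.name} in indexPattern ${indexPatternTitle} is using an unknown field type.`
      );
      return;
    }
    throw err; // anything unexpected should still propagate
  }
}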
save(saveAttempts: number = 0): Promise { - if (!this.id) return; - const body = this.prepBody(); - - const originalChangedKeys: string[] = []; - Object.entries(body).forEach(([key, value]) => { - if (value !== this.originalBody[key]) { - originalChangedKeys.push(key); - } - }); - - return this.savedObjectsClient - .update(savedObjectType, this.id, body, { version: this.version }) - .then((resp) => { - this.id = resp.id; - this.version = resp.version; - }) - .catch((err) => { - if ( - _.get(err, 'res.status') === 409 && - saveAttempts++ < MAX_ATTEMPTS_TO_RESOLVE_CONFLICTS - ) { - const samePattern = new IndexPattern(this.id, { - savedObjectsClient: this.savedObjectsClient, - apiClient: this.apiClient, - patternCache: this.patternCache, - fieldFormats: this.fieldFormats, - onNotification: this.onNotification, - onError: this.onError, - shortDotsEnable: this.shortDotsEnable, - metaFields: this.metaFields, - }); - - return samePattern.init().then(() => { - // What keys changed from now and what the server returned - const updatedBody = samePattern.prepBody(); - - // Build a list of changed keys from the server response - // and ensure we ignore the key if the server response - // is the same as the original response (since that is expected - // if we made a change in that key) - - const serverChangedKeys: string[] = []; - Object.entries(updatedBody).forEach(([key, value]) => { - if (value !== (body as any)[key] && value !== this.originalBody[key]) { - serverChangedKeys.push(key); - } - }); - - let unresolvedCollision = false; - for (const originalKey of originalChangedKeys) { - for (const serverKey of serverChangedKeys) { - if (originalKey === serverKey) { - unresolvedCollision = true; - break; - } - } - } - - if (unresolvedCollision) { - const title = i18n.translate('data.indexPatterns.unableWriteLabel', { - defaultMessage: - 'Unable to write index pattern! Refresh the page to get the most up to date changes for this index pattern.', - }); - - this.onNotification({ title, color: 'danger' }); - throw err; - } - - // Set the updated response on this object - serverChangedKeys.forEach((key) => { - (this as any)[key] = (samePattern as any)[key]; - }); - this.version = samePattern.version; - - // Clear cache - this.patternCache.clear(this.id!); - - // Try the save again - return this.save(saveAttempts); - }); - } - throw err; - }); - } - async _fetchFields() { const fields = await this.fieldsFetcher.fetch(this); const scripted = this.getScriptedFields().map((field) => field.spec); - this.fields.replaceAll([...fields, ...scripted]); + try { + this.fields.replaceAll([...fields, ...scripted]); + } catch (err) { + if (err instanceof FieldTypeUnknownError) { + this.unknownFieldErrorNotification(err.fieldSpec.name, err.fieldSpec.type, this.title); + } else { + throw err; + } + } } refreshFields() { - return this._fetchFields() - .then(() => this.save()) - .catch((err) => { - // https://github.com/elastic/kibana/issues/9224 - // This call will attempt to remap fields from the matching - // ES index which may not actually exist. 
In that scenario, - // we still want to notify the user that there is a problem - // but we do not want to potentially make any pages unusable - // so do not rethrow the error here - - if (err instanceof IndexPatternMissingIndices) { - this.onNotification({ title: (err as any).message, color: 'danger', iconType: 'alert' }); - return []; - } - - this.onError(err, { - title: i18n.translate('data.indexPatterns.fetchFieldErrorTitle', { - defaultMessage: 'Error fetching fields for index pattern {title} (ID: {id})', - values: { - id: this.id, - title: this.title, - }, - }), - }); - }); - } - - toJSON() { - return this.id; - } - - toString() { - return '' + this.toJSON(); + return ( + this._fetchFields() + // todo + .then(() => this.indexPatternsService.save(this)) + .catch((err) => { + // https://github.com/elastic/kibana/issues/9224 + // This call will attempt to remap fields from the matching + // ES index which may not actually exist. In that scenario, + // we still want to notify the user that there is a problem + // but we do not want to potentially make any pages unusable + // so do not rethrow the error here + + if (err instanceof IndexPatternMissingIndices) { + this.onNotification({ + title: (err as any).message, + color: 'danger', + iconType: 'alert', + }); + return []; + } + + this.onError(err, { + title: i18n.translate('data.indexPatterns.fetchFieldErrorTitle', { + defaultMessage: 'Error fetching fields for index pattern {title} (ID: {id})', + values: { + id: this.id, + title: this.title, + }, + }), + }); + }) + ); } } diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts index 8223b31042124..c79c7900148ea 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts +++ b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts @@ -17,28 +17,26 @@ * under the License. 
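One behavioural consequence worth keeping in mind: `addScriptedField` and `removeScriptedField` no longer persist the pattern themselves, and `refreshFields` delegates persistence to the service. A caller that relied on the old implicit save would now do something like the sketch below, where `pattern` and `indexPatternsService` are assumed to be in scope and the `(name, script, fieldType, lang)` argument order is taken from the parameter names visible in this diff.

// addScriptedField()/removeScriptedField() only mutate the in-memory list now;
// the caller saves explicitly through the service afterwards.
await pattern.addScriptedField('doubled_bytes', "doc['bytes'].value * 2", 'number', 'painless');
await indexPatternsService.save(pattern);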
*/ -import { IndexPatternsService } from './index_patterns'; +import { defaults } from 'lodash'; +import { IndexPatternsService } from '.'; import { fieldFormatsMock } from '../../field_formats/mocks'; -import { - UiSettingsCommon, - IIndexPatternsApiClient, - SavedObjectsClientCommon, - SavedObject, -} from '../types'; +import { stubbedSavedObjectIndexPattern } from '../../../../../fixtures/stubbed_saved_object_index_pattern'; +import { UiSettingsCommon, SavedObjectsClientCommon, SavedObject } from '../types'; + +const createFieldsFetcher = jest.fn().mockImplementation(() => ({ + getFieldsForWildcard: jest.fn().mockImplementation(() => { + return new Promise((resolve) => resolve([])); + }), + every: jest.fn(), +})); const fieldFormats = fieldFormatsMock; -jest.mock('./index_pattern', () => { - class IndexPattern { - init = async () => { - return this; - }; - } +let object: any = {}; - return { - IndexPattern, - }; -}); +function setDocsourcePayload(id: string | null, providedPayload: any) { + object = defaults(providedPayload || {}, stubbedSavedObjectIndexPattern(id)); +} describe('IndexPatterns', () => { let indexPatterns: IndexPatternsService; @@ -53,6 +51,25 @@ describe('IndexPatterns', () => { > ); savedObjectsClient.delete = jest.fn(() => Promise.resolve({}) as Promise); + savedObjectsClient.get = jest.fn().mockImplementation(() => object); + savedObjectsClient.create = jest.fn(); + savedObjectsClient.update = jest + .fn() + .mockImplementation(async (type, id, body, { version }) => { + if (object.version !== version) { + throw new Object({ + res: { + status: 409, + }, + }); + } + object.attributes.title = body.title; + object.version += 'a'; + return { + id: object.id, + version: object.version, + }; + }); indexPatterns = new IndexPatternsService({ uiSettings: ({ @@ -60,7 +77,7 @@ describe('IndexPatterns', () => { getAll: () => {}, } as any) as UiSettingsCommon, savedObjectsClient: (savedObjectsClient as unknown) as SavedObjectsClientCommon, - apiClient: {} as IIndexPatternsApiClient, + apiClient: createFieldsFetcher(), fieldFormats, onNotification: () => {}, onError: () => {}, @@ -70,6 +87,14 @@ describe('IndexPatterns', () => { test('does cache gets for the same id', async () => { const id = '1'; + setDocsourcePayload(id, { + id: 'foo', + version: 'foo', + attributes: { + title: 'something', + }, + }); + const indexPattern = await indexPatterns.get(id); expect(indexPattern).toBeDefined(); @@ -107,4 +132,41 @@ describe('IndexPatterns', () => { await indexPatterns.delete(id); expect(indexPattern).not.toBe(await indexPatterns.get(id)); }); + + test('should handle version conflicts', async () => { + setDocsourcePayload(null, { + id: 'foo', + version: 'foo', + attributes: { + title: 'something', + }, + }); + + // Create a normal index patterns + const pattern = await indexPatterns.make('foo'); + + expect(pattern.version).toBe('fooa'); + + // Create the same one - we're going to handle concurrency + const samePattern = await indexPatterns.make('foo'); + + expect(samePattern.version).toBe('fooaa'); + + // This will conflict because samePattern did a save (from refreshFields) + // but the resave should work fine + pattern.title = 'foo2'; + await indexPatterns.save(pattern); + + // This should not be able to recover + samePattern.title = 'foo3'; + + let result; + try { + await indexPatterns.save(samePattern); + } catch (err) { + result = err; + } + + expect(result.res.status).toBe(409); + }); }); diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts 
b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts index 0ad9ae8f2014f..88a7e9f6cef4c 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts +++ b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts @@ -17,6 +17,7 @@ * under the License. */ +import { i18n } from '@kbn/i18n'; import { SavedObjectsClientCommon } from '../..'; import { createIndexPatternCache } from '.'; @@ -25,7 +26,6 @@ import { createEnsureDefaultIndexPattern, EnsureDefaultIndexPattern, } from './ensure_default_index_pattern'; -import { IndexPatternField } from '../fields'; import { OnNotification, OnError, @@ -38,6 +38,8 @@ import { FieldFormatsStartCommon } from '../../field_formats'; import { UI_SETTINGS, SavedObject } from '../../../common'; const indexPatternCache = createIndexPatternCache(); +const MAX_ATTEMPTS_TO_RESOLVE_CONFLICTS = 3; +const savedObjectType = 'index-pattern'; type IndexPatternCachedFieldType = 'id' | 'title'; @@ -86,15 +88,6 @@ export class IndexPatternsService { ); } - public createField( - indexPattern: IndexPattern, - spec: IndexPatternField['spec'], - displayName: string, - onNotification: OnNotification - ) { - return new IndexPatternField(indexPattern, spec, displayName, onNotification); - } - private async refreshSavedObjectsCache() { this.savedObjectsCache = await this.savedObjectsClient.find({ type: 'index-pattern', @@ -191,6 +184,7 @@ export class IndexPatternsService { apiClient: this.apiClient, patternCache: indexPatternCache, fieldFormats: this.fieldFormats, + indexPatternsService: this, onNotification: this.onNotification, onError: this.onError, shortDotsEnable, @@ -201,6 +195,93 @@ export class IndexPatternsService { return indexPattern; } + async save(indexPattern: IndexPattern, saveAttempts: number = 0): Promise { + if (!indexPattern.id) return; + const shortDotsEnable = await this.config.get(UI_SETTINGS.SHORT_DOTS_ENABLE); + const metaFields = await this.config.get(UI_SETTINGS.META_FIELDS); + + const body = indexPattern.prepBody(); + + const originalChangedKeys: string[] = []; + Object.entries(body).forEach(([key, value]) => { + if (value !== indexPattern.originalBody[key]) { + originalChangedKeys.push(key); + } + }); + + return this.savedObjectsClient + .update(savedObjectType, indexPattern.id, body, { version: indexPattern.version }) + .then((resp) => { + indexPattern.id = resp.id; + indexPattern.version = resp.version; + }) + .catch((err) => { + if (err?.res?.status === 409 && saveAttempts++ < MAX_ATTEMPTS_TO_RESOLVE_CONFLICTS) { + const samePattern = new IndexPattern(indexPattern.id, { + savedObjectsClient: this.savedObjectsClient, + apiClient: this.apiClient, + patternCache: indexPatternCache, + fieldFormats: this.fieldFormats, + indexPatternsService: this, + onNotification: this.onNotification, + onError: this.onError, + shortDotsEnable, + metaFields, + }); + + return samePattern.init().then(() => { + // What keys changed from now and what the server returned + const updatedBody = samePattern.prepBody(); + + // Build a list of changed keys from the server response + // and ensure we ignore the key if the server response + // is the same as the original response (since that is expected + // if we made a change in that key) + + const serverChangedKeys: string[] = []; + Object.entries(updatedBody).forEach(([key, value]) => { + if (value !== (body as any)[key] && value !== indexPattern.originalBody[key]) { + serverChangedKeys.push(key); + } + }); + + let unresolvedCollision = false; + for (const originalKey 
of originalChangedKeys) { + for (const serverKey of serverChangedKeys) { + if (originalKey === serverKey) { + unresolvedCollision = true; + break; + } + } + } + + if (unresolvedCollision) { + const title = i18n.translate('data.indexPatterns.unableWriteLabel', { + defaultMessage: + 'Unable to write index pattern! Refresh the page to get the most up to date changes for this index pattern.', + }); + + this.onNotification({ title, color: 'danger' }); + throw err; + } + + // Set the updated response on this object + serverChangedKeys.forEach((key) => { + (indexPattern as any)[key] = (samePattern as any)[key]; + }); + indexPattern.version = samePattern.version; + + // Clear cache + indexPatternCache.clear(indexPattern.id!); + + // Try the save again + return this.save(indexPattern, saveAttempts); + }); + } + throw err; + }); + } + async make(id?: string): Promise { const shortDotsEnable = await this.config.get(UI_SETTINGS.SHORT_DOTS_ENABLE); const metaFields = await this.config.get(UI_SETTINGS.META_FIELDS); @@ -210,6 +291,7 @@ export class IndexPatternsService { apiClient: this.apiClient, patternCache: indexPatternCache, fieldFormats: this.fieldFormats, + indexPatternsService: this, onNotification: this.onNotification, onError: this.onError, shortDotsEnable, diff --git a/src/plugins/data/common/search/aggs/agg_config.test.ts b/src/plugins/data/common/search/aggs/agg_config.test.ts index a443eacee731c..f6fcc29805dc4 100644 --- a/src/plugins/data/common/search/aggs/agg_config.test.ts +++ b/src/plugins/data/common/search/aggs/agg_config.test.ts @@ -25,8 +25,7 @@ import { AggType } from './agg_type'; import { AggTypesRegistryStart } from './agg_types_registry'; import { mockAggTypesRegistry } from './test_helpers'; import { MetricAggType } from './metrics/metric_agg_type'; -import { IndexPattern } from '../../index_patterns/index_patterns/index_pattern'; -import { IIndexPatternFieldList } from '../../index_patterns/fields'; +import { IndexPattern, IndexPatternField, IIndexPatternFieldList } from '../../index_patterns'; describe('AggConfig', () => { let indexPattern: IndexPattern; @@ -67,6 +66,9 @@ describe('AggConfig', () => { getByName: (name: string) => fields.find((f) => f.name === name), filter: () => fields, } as unknown) as IndexPattern['fields'], + getFormatterForField: (field: IndexPatternField) => ({ + toJSON: () => ({}), + }), } as IndexPattern; typesRegistry = mockAggTypesRegistry(); }); diff --git a/src/plugins/data/common/search/aggs/agg_config.ts b/src/plugins/data/common/search/aggs/agg_config.ts index b5747ce7bb9bd..201e9f1ec402c 100644 --- a/src/plugins/data/common/search/aggs/agg_config.ts +++ b/src/plugins/data/common/search/aggs/agg_config.ts @@ -21,12 +21,13 @@ import _ from 'lodash'; import { i18n } from '@kbn/i18n'; import { Assign, Ensure } from '@kbn/utility-types'; -import { FetchOptions, ISearchSource } from 'src/plugins/data/public'; +import { ISearchSource } from 'src/plugins/data/public'; import { ExpressionAstFunction, ExpressionAstArgument, SerializedFieldFormat, } from 'src/plugins/expressions/common'; +import { ISearchOptions } from '../es_search'; import { IAggType } from './agg_type'; import { writeParams } from './agg_params'; @@ -213,11 +214,11 @@ export class AggConfig { /** * Hook for pre-flight logic, see AggType#onSearchRequestStart - * @param {Courier.SearchSource} searchSource - * @param {Courier.FetchOptions} options + * @param {SearchSource} searchSource + * @param {ISearchOptions} options * @return {Promise} */ - onSearchRequestStart(searchSource: 
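The version-conflict behaviour exercised by the relocated test can be summarised as: `save()` retries up to `MAX_ATTEMPTS_TO_RESOLVE_CONFLICTS` times when the saved-object update returns a 409, merges server-side changes that do not collide with local ones, and rethrows when the same key changed on both sides. A minimal consumer-side sketch, assuming `indexPatterns` is a started `IndexPatternsService` and the id is hypothetical:

const pattern = await indexPatterns.make('my-pattern-id');
pattern.title = 'logs-*';

try {
  await indexPatterns.save(pattern);
} catch (err) {
  // An unresolved conflict surfaces as the original 409 error after the
  // "Unable to write index pattern!" notification has been emitted.
  if (err?.res?.status === 409) {
    // e.g. re-fetch the pattern and let the user re-apply the change
  }
  throw err;
}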
ISearchSource, options?: FetchOptions) { + onSearchRequestStart(searchSource: ISearchSource, options?: ISearchOptions) { if (!this.type) { return Promise.resolve(); } diff --git a/src/plugins/data/common/search/aggs/agg_configs.ts b/src/plugins/data/common/search/aggs/agg_configs.ts index 203eda3a907ee..282e6f3b538a4 100644 --- a/src/plugins/data/common/search/aggs/agg_configs.ts +++ b/src/plugins/data/common/search/aggs/agg_configs.ts @@ -20,7 +20,7 @@ import _ from 'lodash'; import { Assign } from '@kbn/utility-types'; -import { FetchOptions, ISearchSource } from 'src/plugins/data/public'; +import { ISearchOptions, ISearchSource } from 'src/plugins/data/public'; import { AggConfig, AggConfigSerialized, IAggConfig } from './agg_config'; import { IAggType } from './agg_type'; import { AggTypesRegistryStart } from './agg_types_registry'; @@ -300,7 +300,7 @@ export class AggConfigs { return _.find(reqAgg.getResponseAggs(), { id }); } - onSearchRequestStart(searchSource: ISearchSource, options?: FetchOptions) { + onSearchRequestStart(searchSource: ISearchSource, options?: ISearchOptions) { return Promise.all( // @ts-ignore this.getRequestAggs().map((agg: AggConfig) => agg.onSearchRequestStart(searchSource, options)) diff --git a/src/plugins/data/common/search/aggs/agg_type.test.ts b/src/plugins/data/common/search/aggs/agg_type.test.ts index bf1136159dfe8..16a5586858ab9 100644 --- a/src/plugins/data/common/search/aggs/agg_type.test.ts +++ b/src/plugins/data/common/search/aggs/agg_type.test.ts @@ -147,6 +147,9 @@ describe('AggType Class', () => { }, }, }, + aggConfigs: { + indexPattern: { getFormatterForField: () => ({ toJSON: () => ({ id: 'format' }) }) }, + }, } as unknown) as IAggConfig; const aggType = new AggType({ name: 'name', diff --git a/src/plugins/data/common/search/aggs/agg_type.ts b/src/plugins/data/common/search/aggs/agg_type.ts index 2ee604c1bf25d..1e3839038b0f7 100644 --- a/src/plugins/data/common/search/aggs/agg_type.ts +++ b/src/plugins/data/common/search/aggs/agg_type.ts @@ -271,7 +271,9 @@ export class AggType< this.getSerializedFormat = config.getSerializedFormat || ((agg: TAggConfig) => { - return agg.params.field ? agg.params.field.format.toJSON() : {}; + return agg.params.field + ? agg.aggConfigs.indexPattern.getFormatterForField(agg.params.field).toJSON() + : {}; }); this.getValue = config.getValue || ((agg: TAggConfig, bucket: any) => {}); diff --git a/src/plugins/data/common/search/aggs/buckets/_interval_options.ts b/src/plugins/data/common/search/aggs/buckets/_interval_options.ts index 00cf50c272fa0..f94484a6edc2e 100644 --- a/src/plugins/data/common/search/aggs/buckets/_interval_options.ts +++ b/src/plugins/data/common/search/aggs/buckets/_interval_options.ts @@ -20,12 +20,15 @@ import { i18n } from '@kbn/i18n'; import { IBucketAggConfig } from './bucket_agg_type'; +export const autoInterval = 'auto'; +export const isAutoInterval = (value: unknown) => value === autoInterval; + export const intervalOptions = [ { display: i18n.translate('data.search.aggs.buckets.intervalOptions.autoDisplayName', { defaultMessage: 'Auto', }), - val: 'auto', + val: autoInterval, enabled(agg: IBucketAggConfig) { // not only do we need a time field, but the selected field needs // to be the time field. 
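Because the `format` getter is gone from `IndexPatternField`, code that previously formatted values through the field itself now asks the index pattern, as `formatHitProvider` does earlier in this diff. A two-line before/after sketch; `indexPattern`, `field`, and `value` are assumed to be in scope.

// before this change: const html = field.format.convert(value, 'html');
const html = indexPattern.getFormatterForField(field).convert(value, 'html');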
(see #3028) diff --git a/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.ts b/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.ts index 1a7deafb548ae..7c09d2e64e8b7 100644 --- a/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.ts +++ b/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.ts @@ -140,7 +140,7 @@ export const buildOtherBucketAgg = ( const bucketAggs = aggConfigs.aggs.filter((agg) => agg.type.type === AggGroupNames.Buckets); const index = bucketAggs.findIndex((agg) => agg.id === aggWithOtherBucket.id); const aggs = aggConfigs.toDsl(); - const indexPattern = aggWithOtherBucket.params.field.indexPattern; + const indexPattern = aggWithOtherBucket.aggConfigs.indexPattern; // create filters aggregation const filterAgg = aggConfigs.createAggConfig( @@ -211,7 +211,7 @@ export const buildOtherBucketAgg = ( filters.push( buildExistsFilter( aggWithOtherBucket.params.field, - aggWithOtherBucket.params.field.indexPattern + aggWithOtherBucket.aggConfigs.indexPattern ) ); } @@ -264,7 +264,7 @@ export const mergeOtherBucketAggResponse = ( const phraseFilter = buildPhrasesFilter( otherAgg.params.field, requestFilterTerms, - otherAgg.params.field.indexPattern + otherAgg.aggConfigs.indexPattern ); phraseFilter.meta.negate = true; bucket.filters = [phraseFilter]; @@ -276,7 +276,7 @@ export const mergeOtherBucketAggResponse = ( ) ) { bucket.filters.push( - buildExistsFilter(otherAgg.params.field, otherAgg.params.field.indexPattern) + buildExistsFilter(otherAgg.params.field, otherAgg.aggConfigs.indexPattern) ); } aggResultBuckets.push(bucket); diff --git a/src/plugins/data/common/search/aggs/buckets/create_filter/date_histogram.test.ts b/src/plugins/data/common/search/aggs/buckets/create_filter/date_histogram.test.ts index 143d549836900..3d0224b213e8d 100644 --- a/src/plugins/data/common/search/aggs/buckets/create_filter/date_histogram.test.ts +++ b/src/plugins/data/common/search/aggs/buckets/create_filter/date_histogram.test.ts @@ -19,7 +19,7 @@ import moment from 'moment'; import { createFilterDateHistogram } from './date_histogram'; -import { intervalOptions } from '../_interval_options'; +import { intervalOptions, autoInterval } from '../_interval_options'; import { AggConfigs } from '../../agg_configs'; import { mockAggTypesRegistry } from '../../test_helpers'; import { IBucketDateHistogramAggConfig } from '../date_histogram'; @@ -33,7 +33,10 @@ describe('AggConfig Filters', () => { let bucketStart: any; let field: any; - const init = (interval: string = 'auto', duration: any = moment.duration(15, 'minutes')) => { + const init = ( + interval: string = autoInterval, + duration: any = moment.duration(15, 'minutes') + ) => { field = { name: 'date', }; diff --git a/src/plugins/data/common/search/aggs/buckets/create_filter/histogram.test.ts b/src/plugins/data/common/search/aggs/buckets/create_filter/histogram.test.ts index b57d530ef40e8..dc1d0ec0a152f 100644 --- a/src/plugins/data/common/search/aggs/buckets/create_filter/histogram.test.ts +++ b/src/plugins/data/common/search/aggs/buckets/create_filter/histogram.test.ts @@ -40,6 +40,7 @@ describe('AggConfig Filters', () => { getByName: () => field, filter: () => [field], }, + getFormatterForField: () => new BytesFormat({}, getConfig), } as any; return new AggConfigs( diff --git a/src/plugins/data/common/search/aggs/buckets/create_filter/range.test.ts b/src/plugins/data/common/search/aggs/buckets/create_filter/range.test.ts index 30af970f55aa9..b53ae44c05075 100644 
--- a/src/plugins/data/common/search/aggs/buckets/create_filter/range.test.ts +++ b/src/plugins/data/common/search/aggs/buckets/create_filter/range.test.ts @@ -30,7 +30,6 @@ describe('AggConfig Filters', () => { const getAggConfigs = () => { const field = { name: 'bytes', - format: new BytesFormat({}, getConfig), }; const indexPattern = { @@ -40,6 +39,7 @@ describe('AggConfig Filters', () => { getByName: () => field, filter: () => [field], }, + getFormatterForField: () => new BytesFormat({}, getConfig), } as any; return new AggConfigs( diff --git a/src/plugins/data/common/search/aggs/buckets/create_filter/terms.ts b/src/plugins/data/common/search/aggs/buckets/create_filter/terms.ts index 95de19b96abd4..ccd1cf6e358b4 100644 --- a/src/plugins/data/common/search/aggs/buckets/create_filter/terms.ts +++ b/src/plugins/data/common/search/aggs/buckets/create_filter/terms.ts @@ -27,7 +27,7 @@ import { export const createFilterTerms = (aggConfig: IBucketAggConfig, key: string, params: any) => { const field = aggConfig.params.field; - const indexPattern = field.indexPattern; + const indexPattern = aggConfig.aggConfigs.indexPattern; if (key === '__other__') { const terms = params.terms; diff --git a/src/plugins/data/common/search/aggs/buckets/date_histogram.ts b/src/plugins/data/common/search/aggs/buckets/date_histogram.ts index fdf9c456b3876..c273ca53a5fed 100644 --- a/src/plugins/data/common/search/aggs/buckets/date_histogram.ts +++ b/src/plugins/data/common/search/aggs/buckets/date_histogram.ts @@ -23,7 +23,7 @@ import { i18n } from '@kbn/i18n'; import { KBN_FIELD_TYPES, TimeRange, TimeRangeBounds, UI_SETTINGS } from '../../../../common'; -import { intervalOptions } from './_interval_options'; +import { intervalOptions, autoInterval, isAutoInterval } from './_interval_options'; import { createFilterDateHistogram } from './create_filter/date_histogram'; import { BucketAggType, IBucketAggConfig } from './bucket_agg_type'; import { BUCKET_TYPES } from './bucket_agg_types'; @@ -44,7 +44,7 @@ const updateTimeBuckets = ( customBuckets?: IBucketDateHistogramAggConfig['buckets'] ) => { const bounds = - agg.params.timeRange && (agg.fieldIsTimeField() || agg.params.interval === 'auto') + agg.params.timeRange && (agg.fieldIsTimeField() || isAutoInterval(agg.params.interval)) ? calculateBounds(agg.params.timeRange) : undefined; const buckets = customBuckets || agg.buckets; @@ -149,7 +149,7 @@ export const getDateHistogramBucketAgg = ({ return agg.getIndexPattern().timeFieldName; }, onChange(agg: IBucketDateHistogramAggConfig) { - if (get(agg, 'params.interval') === 'auto' && !agg.fieldIsTimeField()) { + if (isAutoInterval(get(agg, 'params.interval')) && !agg.fieldIsTimeField()) { delete agg.params.interval; } }, @@ -187,7 +187,7 @@ export const getDateHistogramBucketAgg = ({ } return state; }, - default: 'auto', + default: autoInterval, options: intervalOptions, write(agg, output, aggs) { updateTimeBuckets(agg, calculateBounds); diff --git a/src/plugins/data/common/search/aggs/buckets/date_range.ts b/src/plugins/data/common/search/aggs/buckets/date_range.ts index eda35a77afa5f..f9a3acb990fbf 100644 --- a/src/plugins/data/common/search/aggs/buckets/date_range.ts +++ b/src/plugins/data/common/search/aggs/buckets/date_range.ts @@ -58,7 +58,9 @@ export const getDateRangeBucketAgg = ({ getSerializedFormat(agg) { return { id: 'date_range', - params: agg.params.field ? agg.params.field.format.toJSON() : {}, + params: agg.params.field + ? 
agg.aggConfigs.indexPattern.getFormatterForField(agg.params.field).toJSON() + : {}, }; }, makeLabel(aggConfig) { diff --git a/src/plugins/data/common/search/aggs/buckets/histogram.test.ts b/src/plugins/data/common/search/aggs/buckets/histogram.test.ts index 3727747984d3e..a8ac72c174c72 100644 --- a/src/plugins/data/common/search/aggs/buckets/histogram.test.ts +++ b/src/plugins/data/common/search/aggs/buckets/histogram.test.ts @@ -103,7 +103,28 @@ describe('Histogram Agg', () => { }); }); + describe('maxBars', () => { + test('should not be written to the DSL', () => { + const aggConfigs = getAggConfigs({ + maxBars: 50, + field: { + name: 'field', + }, + }); + const { [BUCKET_TYPES.HISTOGRAM]: params } = aggConfigs.aggs[0].toDsl(); + + expect(params).not.toHaveProperty('maxBars'); + }); + }); + describe('interval', () => { + test('accepts "auto" value', () => { + const params = getParams({ + interval: 'auto', + }); + + expect(params).toHaveProperty('interval', 1); + }); test('accepts a whole number', () => { const params = getParams({ interval: 100, diff --git a/src/plugins/data/common/search/aggs/buckets/histogram.ts b/src/plugins/data/common/search/aggs/buckets/histogram.ts index 2b263013e55a2..4b631e1fd7cd7 100644 --- a/src/plugins/data/common/search/aggs/buckets/histogram.ts +++ b/src/plugins/data/common/search/aggs/buckets/histogram.ts @@ -28,6 +28,8 @@ import { BucketAggType, IBucketAggConfig } from './bucket_agg_type'; import { createFilterHistogram } from './create_filter/histogram'; import { BUCKET_TYPES } from './bucket_agg_types'; import { ExtendedBounds } from './lib/extended_bounds'; +import { isAutoInterval, autoInterval } from './_interval_options'; +import { calculateHistogramInterval } from './lib/histogram_calculate_interval'; export interface AutoBounds { min: number; @@ -47,6 +49,7 @@ export interface IBucketHistogramAggConfig extends IBucketAggConfig { export interface AggParamsHistogram extends BaseAggParams { field: string; interval: string; + maxBars?: number; intervalBase?: number; min_doc_count?: boolean; has_extended_bounds?: boolean; @@ -102,6 +105,7 @@ export const getHistogramBucketAgg = ({ }, { name: 'interval', + default: autoInterval, modifyAggConfigOnSearchRequestStart( aggConfig: IBucketHistogramAggConfig, searchSource: any, @@ -127,9 +131,12 @@ export const getHistogramBucketAgg = ({ return childSearchSource .fetch(options) .then((resp: any) => { + const min = resp.aggregations?.minAgg?.value ?? 0; + const max = resp.aggregations?.maxAgg?.value ?? 
0; + aggConfig.setAutoBounds({ - min: get(resp, 'aggregations.minAgg.value'), - max: get(resp, 'aggregations.maxAgg.value'), + min, + max, }); }) .catch((e: Error) => { @@ -143,46 +150,24 @@ export const getHistogramBucketAgg = ({ }); }, write(aggConfig, output) { - let interval = parseFloat(aggConfig.params.interval); - if (interval <= 0) { - interval = 1; - } - const autoBounds = aggConfig.getAutoBounds(); - - // ensure interval does not create too many buckets and crash browser - if (autoBounds) { - const range = autoBounds.max - autoBounds.min; - const bars = range / interval; - - if (bars > getConfig(UI_SETTINGS.HISTOGRAM_MAX_BARS)) { - const minInterval = range / getConfig(UI_SETTINGS.HISTOGRAM_MAX_BARS); - - // Round interval by order of magnitude to provide clean intervals - // Always round interval up so there will always be less buckets than histogram:maxBars - const orderOfMagnitude = Math.pow(10, Math.floor(Math.log10(minInterval))); - let roundInterval = orderOfMagnitude; - - while (roundInterval < minInterval) { - roundInterval += orderOfMagnitude; - } - interval = roundInterval; - } - } - const base = aggConfig.params.intervalBase; - - if (base) { - if (interval < base) { - // In case the specified interval is below the base, just increase it to it's base - interval = base; - } else if (interval % base !== 0) { - // In case the interval is not a multiple of the base round it to the next base - interval = Math.round(interval / base) * base; - } - } - - output.params.interval = interval; + const values = aggConfig.getAutoBounds(); + + output.params.interval = calculateHistogramInterval({ + values, + interval: aggConfig.params.interval, + maxBucketsUiSettings: getConfig(UI_SETTINGS.HISTOGRAM_MAX_BARS), + maxBucketsUserInput: aggConfig.params.maxBars, + intervalBase: aggConfig.params.intervalBase, + }); }, }, + { + name: 'maxBars', + shouldShow(agg) { + return isAutoInterval(get(agg, 'params.interval')); + }, + write: () => {}, + }, { name: 'min_doc_count', default: false, diff --git a/src/plugins/data/common/search/aggs/buckets/histogram_fn.test.ts b/src/plugins/data/common/search/aggs/buckets/histogram_fn.test.ts index 34b6fa1a6dcd6..354946f99a2f5 100644 --- a/src/plugins/data/common/search/aggs/buckets/histogram_fn.test.ts +++ b/src/plugins/data/common/search/aggs/buckets/histogram_fn.test.ts @@ -43,6 +43,7 @@ describe('agg_expression_functions', () => { "interval": "10", "intervalBase": undefined, "json": undefined, + "maxBars": undefined, "min_doc_count": undefined, }, "schema": undefined, @@ -55,8 +56,9 @@ describe('agg_expression_functions', () => { test('includes optional params when they are provided', () => { const actual = fn({ field: 'field', - interval: '10', + interval: 'auto', intervalBase: 1, + maxBars: 25, min_doc_count: false, has_extended_bounds: false, extended_bounds: JSON.stringify({ @@ -77,9 +79,10 @@ describe('agg_expression_functions', () => { }, "field": "field", "has_extended_bounds": false, - "interval": "10", + "interval": "auto", "intervalBase": 1, "json": undefined, + "maxBars": 25, "min_doc_count": false, }, "schema": undefined, diff --git a/src/plugins/data/common/search/aggs/buckets/histogram_fn.ts b/src/plugins/data/common/search/aggs/buckets/histogram_fn.ts index 877fd13e59f87..2e833bbe0a3eb 100644 --- a/src/plugins/data/common/search/aggs/buckets/histogram_fn.ts +++ b/src/plugins/data/common/search/aggs/buckets/histogram_fn.ts @@ -85,6 +85,12 @@ export const aggHistogram = (): FunctionDefinition => ({ defaultMessage: 'Specifies whether to use 
min_doc_count for this aggregation', }), }, + maxBars: { + types: ['number'], + help: i18n.translate('data.search.aggs.buckets.histogram.maxBars.help', { + defaultMessage: 'Calculate interval to get approximately this many bars', + }), + }, has_extended_bounds: { types: ['boolean'], help: i18n.translate('data.search.aggs.buckets.histogram.hasExtendedBounds.help', { diff --git a/src/plugins/data/common/search/aggs/buckets/ip_range.ts b/src/plugins/data/common/search/aggs/buckets/ip_range.ts index 46e0b62d0f8d7..d0a6174b011fc 100644 --- a/src/plugins/data/common/search/aggs/buckets/ip_range.ts +++ b/src/plugins/data/common/search/aggs/buckets/ip_range.ts @@ -59,7 +59,9 @@ export const getIpRangeBucketAgg = () => getSerializedFormat(agg) { return { id: 'ip_range', - params: agg.params.field ? agg.params.field.format.toJSON() : {}, + params: agg.params.field + ? agg.aggConfigs.indexPattern.getFormatterForField(agg.params.field).toJSON() + : {}, }; }, makeLabel(aggConfig) { diff --git a/src/plugins/data/common/search/aggs/buckets/lib/histogram_calculate_interval.test.ts b/src/plugins/data/common/search/aggs/buckets/lib/histogram_calculate_interval.test.ts new file mode 100644 index 0000000000000..fd788d3339295 --- /dev/null +++ b/src/plugins/data/common/search/aggs/buckets/lib/histogram_calculate_interval.test.ts @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
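To tie the new `maxBars` argument to the agg params it feeds: it is only consulted while the interval is `'auto'`, which is what the `shouldShow` guard on the `maxBars` param enforces in the editor. A sketch of the serialized params shape described by `AggParamsHistogram`; the import path and field name are assumptions.

import type { AggParamsHistogram } from 'src/plugins/data/common/search/aggs/buckets/histogram';

const histogramParams: AggParamsHistogram = {
  field: 'bytes',   // hypothetical numeric field
  interval: 'auto', // resolved to a concrete number at request time
  maxBars: 25,      // only consulted while interval is 'auto'
  min_doc_count: true,
};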
+ */ + +import { + calculateHistogramInterval, + CalculateHistogramIntervalParams, +} from './histogram_calculate_interval'; + +describe('calculateHistogramInterval', () => { + describe('auto calculating mode', () => { + let params: CalculateHistogramIntervalParams; + + beforeEach(() => { + params = { + interval: 'auto', + intervalBase: undefined, + maxBucketsUiSettings: 100, + maxBucketsUserInput: undefined, + values: { + min: 0, + max: 1, + }, + }; + }); + + describe('maxBucketsUserInput is defined', () => { + test('should not set interval which more than largest possible', () => { + const p = { + ...params, + maxBucketsUserInput: 200, + values: { + min: 150, + max: 250, + }, + }; + expect(calculateHistogramInterval(p)).toEqual(1); + }); + + test('should correctly work for float numbers (small numbers)', () => { + expect( + calculateHistogramInterval({ + ...params, + maxBucketsUserInput: 50, + values: { + min: 0.1, + max: 0.9, + }, + }) + ).toBe(0.02); + }); + + test('should correctly work for float numbers (big numbers)', () => { + expect( + calculateHistogramInterval({ + ...params, + maxBucketsUserInput: 10, + values: { + min: 10.45, + max: 1000.05, + }, + }) + ).toBe(100); + }); + }); + + describe('maxBucketsUserInput is not defined', () => { + test('should not set interval which more than largest possible', () => { + expect( + calculateHistogramInterval({ + ...params, + values: { + min: 0, + max: 100, + }, + }) + ).toEqual(1); + }); + + test('should set intervals for integer numbers (diff less than maxBucketsUiSettings)', () => { + expect( + calculateHistogramInterval({ + ...params, + values: { + min: 1, + max: 10, + }, + }) + ).toEqual(0.1); + }); + + test('should set intervals for integer numbers (diff more than maxBucketsUiSettings)', () => { + // diff === 44445; interval === 500; buckets === 89 + expect( + calculateHistogramInterval({ + ...params, + values: { + min: 45678, + max: 90123, + }, + }) + ).toEqual(500); + }); + + test('should set intervals the same for the same interval', () => { + // both diffs are the same + // diff === 1.655; interval === 0.02; buckets === 82 + expect( + calculateHistogramInterval({ + ...params, + values: { + min: 1.245, + max: 2.9, + }, + }) + ).toEqual(0.02); + expect( + calculateHistogramInterval({ + ...params, + values: { + min: 0.5, + max: 2.3, + }, + }) + ).toEqual(0.02); + }); + }); + }); +}); diff --git a/src/plugins/data/common/search/aggs/buckets/lib/histogram_calculate_interval.ts b/src/plugins/data/common/search/aggs/buckets/lib/histogram_calculate_interval.ts new file mode 100644 index 0000000000000..f4e42fa8881ef --- /dev/null +++ b/src/plugins/data/common/search/aggs/buckets/lib/histogram_calculate_interval.ts @@ -0,0 +1,143 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+import { isAutoInterval } from '../_interval_options';
+
+interface IntervalValuesRange {
+ min: number;
+ max: number;
+}
+
+export interface CalculateHistogramIntervalParams {
+ interval: string;
+ maxBucketsUiSettings: number;
+ maxBucketsUserInput?: number;
+ intervalBase?: number;
+ values?: IntervalValuesRange;
+}
+
+/**
+ * Round interval by order of magnitude to provide clean intervals
+ */
+const roundInterval = (minInterval: number) => {
+ const orderOfMagnitude = Math.pow(10, Math.floor(Math.log10(minInterval)));
+ let interval = orderOfMagnitude;
+
+ while (interval < minInterval) {
+ interval += orderOfMagnitude;
+ }
+
+ return interval;
+};
+
+const calculateForGivenInterval = (
+ diff: number,
+ interval: number,
+ maxBucketsUiSettings: CalculateHistogramIntervalParams['maxBucketsUiSettings']
+) => {
+ const bars = diff / interval;
+
+ if (bars > maxBucketsUiSettings) {
+ const minInterval = diff / maxBucketsUiSettings;
+
+ return roundInterval(minInterval);
+ }
+
+ return interval;
+};
+
+/**
+ * Algorithm for determining auto-interval
+
+ 1. Define maxBars as Math.min(maxBucketsUiSettings, maxBucketsUserInput)
+ 2. Find the min and max values in the data
+ 3. Subtract the min from max to get diff
+ 4. Set exactInterval to diff / maxBars
+ 5. Based on exactInterval, find the power of 10 that's lower and higher
+ 6. Find the number of expected buckets that lowerPower would create: diff / lowerPower
+ 7. Find the number of expected buckets that higherPower would create: diff / higherPower
+ 8. There are three possible final intervals, pick the one that's closest to maxBars:
+ - The lower power of 10
+ - The lower power of 10, times 2
+ - The lower power of 10, times 5
+ **/
+const calculateAutoInterval = (
+ diff: number,
+ maxBucketsUiSettings: CalculateHistogramIntervalParams['maxBucketsUiSettings'],
+ maxBucketsUserInput: CalculateHistogramIntervalParams['maxBucketsUserInput']
+) => {
+ const maxBars = Math.min(maxBucketsUiSettings, maxBucketsUserInput ?? maxBucketsUiSettings);
+ const exactInterval = diff / maxBars;
+
+ const lowerPower = Math.pow(10, Math.floor(Math.log10(exactInterval)));
+
+ const autoBuckets = diff / lowerPower;
+
+ if (autoBuckets > maxBars) {
+ if (autoBuckets / 2 <= maxBars) {
+ return lowerPower * 2;
+ } else if (autoBuckets / 5 <= maxBars) {
+ return lowerPower * 5;
+ } else {
+ return lowerPower * 10;
+ }
+ }
+
+ return lowerPower;
+};
+
+export const calculateHistogramInterval = ({
+ interval,
+ maxBucketsUiSettings,
+ maxBucketsUserInput,
+ intervalBase,
+ values,
+}: CalculateHistogramIntervalParams) => {
+ const isAuto = isAutoInterval(interval);
+ let calculatedInterval = isAuto ? 0 : parseFloat(interval);
+
+ // should return NaN on non-numeric or invalid values
+ if (Number.isNaN(calculatedInterval)) {
+ return calculatedInterval;
+ }
+
+ if (values) {
+ const diff = values.max - values.min;
+
+ if (diff) {
+ calculatedInterval = isAuto
+ ?
calculateAutoInterval(diff, maxBucketsUiSettings, maxBucketsUserInput) + : calculateForGivenInterval(diff, calculatedInterval, maxBucketsUiSettings); + } + } + + if (intervalBase) { + if (calculatedInterval < intervalBase) { + // In case the specified interval is below the base, just increase it to it's base + calculatedInterval = intervalBase; + } else if (calculatedInterval % intervalBase !== 0) { + // In case the interval is not a multiple of the base round it to the next base + calculatedInterval = Math.round(calculatedInterval / intervalBase) * intervalBase; + } + } + + const defaultValueForUnspecifiedInterval = 1; + + return calculatedInterval || defaultValueForUnspecifiedInterval; +}; diff --git a/src/plugins/data/common/search/aggs/buckets/lib/time_buckets/time_buckets.test.ts b/src/plugins/data/common/search/aggs/buckets/lib/time_buckets/time_buckets.test.ts index ae7630ecd3dac..04e64233ce196 100644 --- a/src/plugins/data/common/search/aggs/buckets/lib/time_buckets/time_buckets.test.ts +++ b/src/plugins/data/common/search/aggs/buckets/lib/time_buckets/time_buckets.test.ts @@ -20,6 +20,7 @@ import moment from 'moment'; import { TimeBuckets, TimeBucketsConfig } from './time_buckets'; +import { autoInterval } from '../../_interval_options'; describe('TimeBuckets', () => { const timeBucketConfig: TimeBucketsConfig = { @@ -103,7 +104,7 @@ describe('TimeBuckets', () => { test('setInterval/getInterval - intreval is a "auto"', () => { const timeBuckets = new TimeBuckets(timeBucketConfig); - timeBuckets.setInterval('auto'); + timeBuckets.setInterval(autoInterval); const interval = timeBuckets.getInterval(); expect(interval.description).toEqual('0 milliseconds'); diff --git a/src/plugins/data/common/search/aggs/buckets/lib/time_buckets/time_buckets.ts b/src/plugins/data/common/search/aggs/buckets/lib/time_buckets/time_buckets.ts index 6402a6e83ead9..d054df0c9274e 100644 --- a/src/plugins/data/common/search/aggs/buckets/lib/time_buckets/time_buckets.ts +++ b/src/plugins/data/common/search/aggs/buckets/lib/time_buckets/time_buckets.ts @@ -28,6 +28,7 @@ import { convertIntervalToEsInterval, EsInterval, } from './calc_es_interval'; +import { autoInterval } from '../../_interval_options'; interface TimeBucketsInterval extends moment.Duration { // TODO double-check whether all of these are needed @@ -189,8 +190,8 @@ export class TimeBuckets { interval = input.val; } - if (!interval || interval === 'auto') { - this._i = 'auto'; + if (!interval || interval === autoInterval) { + this._i = autoInterval; return; } diff --git a/src/plugins/data/common/search/aggs/buckets/range.test.ts b/src/plugins/data/common/search/aggs/buckets/range.test.ts index b23b03db6a9ec..b8241e04ea1ee 100644 --- a/src/plugins/data/common/search/aggs/buckets/range.test.ts +++ b/src/plugins/data/common/search/aggs/buckets/range.test.ts @@ -27,12 +27,6 @@ describe('Range Agg', () => { const getAggConfigs = () => { const field = { name: 'bytes', - format: new NumberFormat( - { - pattern: '0,0.[000] b', - }, - getConfig - ), }; const indexPattern = { @@ -42,6 +36,13 @@ describe('Range Agg', () => { getByName: () => field, filter: () => [field], }, + getFormatterForField: () => + new NumberFormat( + { + pattern: '0,0.[000] b', + }, + getConfig + ), } as any; return new AggConfigs( diff --git a/src/plugins/data/common/search/aggs/buckets/range.ts b/src/plugins/data/common/search/aggs/buckets/range.ts index 91a357b635950..169b234845274 100644 --- a/src/plugins/data/common/search/aggs/buckets/range.ts +++ 
b/src/plugins/data/common/search/aggs/buckets/range.ts @@ -78,7 +78,9 @@ export const getRangeBucketAgg = ({ getFieldFormatsStart }: RangeBucketAggDepend return key; }, getSerializedFormat(agg) { - const format = agg.params.field ? agg.params.field.format.toJSON() : {}; + const format = agg.params.field + ? agg.aggConfigs.indexPattern.getFormatterForField(agg.params.field).toJSON() + : { id: undefined, params: undefined }; return { id: 'range', params: { diff --git a/src/plugins/data/common/search/aggs/buckets/terms.ts b/src/plugins/data/common/search/aggs/buckets/terms.ts index 5c8483cf21369..1363d38748c8b 100644 --- a/src/plugins/data/common/search/aggs/buckets/terms.ts +++ b/src/plugins/data/common/search/aggs/buckets/terms.ts @@ -82,7 +82,9 @@ export const getTermsBucketAgg = () => return agg.getFieldDisplayName() + ': ' + params.order.text; }, getSerializedFormat(agg) { - const format = agg.params.field ? agg.params.field.format.toJSON() : {}; + const format = agg.params.field + ? agg.aggConfigs.indexPattern.getFormatterForField(agg.params.field).toJSON() + : { id: undefined, params: undefined }; return { id: 'terms', params: { diff --git a/src/plugins/data/common/search/aggs/metrics/parent_pipeline.test.ts b/src/plugins/data/common/search/aggs/metrics/parent_pipeline.test.ts index c6bba56f73ec7..4815ab0ac56dc 100644 --- a/src/plugins/data/common/search/aggs/metrics/parent_pipeline.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/parent_pipeline.test.ts @@ -66,9 +66,6 @@ describe('parent pipeline aggs', function () { ) => { const field = { name: 'field', - format: { - toJSON: () => ({ id: 'bytes' }), - }, }; const indexPattern = { id: '1234', @@ -77,6 +74,9 @@ describe('parent pipeline aggs', function () { getByName: () => field, filter: () => [field], }, + getFormatterForField: () => ({ + toJSON: () => ({ id: 'bytes' }), + }), } as any; const aggConfigs = new AggConfigs( diff --git a/src/plugins/data/common/search/aggs/metrics/sibling_pipeline.test.ts b/src/plugins/data/common/search/aggs/metrics/sibling_pipeline.test.ts index a157d225c839c..32737f7b7237d 100644 --- a/src/plugins/data/common/search/aggs/metrics/sibling_pipeline.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/sibling_pipeline.test.ts @@ -72,6 +72,9 @@ describe('sibling pipeline aggs', () => { getByName: () => field, filter: () => [field], }, + getFormatterForField: () => ({ + toJSON: () => ({ id: 'bytes' }), + }), } as any; const aggConfigs = new AggConfigs( diff --git a/src/plugins/data/common/search/aggs/param_types/base.ts b/src/plugins/data/common/search/aggs/param_types/base.ts index 3a12a9a54500f..c0316c974e26f 100644 --- a/src/plugins/data/common/search/aggs/param_types/base.ts +++ b/src/plugins/data/common/search/aggs/param_types/base.ts @@ -17,7 +17,7 @@ * under the License. 
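The range, terms, and pipeline changes above all follow the same pattern: getSerializedFormat() now resolves a field's formatter through indexPattern.getFormatterForField() instead of reading field.format. A hedged sketch of the test-double shape the updated specs rely on (the import path and field contents are illustrative):

import { fieldFormats, FieldFormatsGetConfigFn } from 'src/plugins/data/public';

const field = { name: 'bytes', filterable: true };

const indexPattern = {
  id: 'logstash-*',
  fields: {
    getByName: () => field,
    filter: () => [field],
  },
  // Serialization now asks the index pattern for the field's formatter rather
  // than reading a `format` property off the field object itself.
  getFormatterForField: () =>
    new fieldFormats.BytesFormat({}, (() => {}) as FieldFormatsGetConfigFn),
} as any;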
*/ -import { FetchOptions, ISearchSource } from 'src/plugins/data/public'; +import { ISearchOptions, ISearchSource } from 'src/plugins/data/public'; import { ExpressionAstFunction } from 'src/plugins/expressions/common'; import { IAggConfigs } from '../agg_configs'; import { IAggConfig } from '../agg_config'; @@ -56,7 +56,7 @@ export class BaseParamType { modifyAggConfigOnSearchRequestStart: ( aggConfig: TAggConfig, searchSource?: ISearchSource, - options?: FetchOptions + options?: ISearchOptions ) => void; constructor(config: Record) { diff --git a/src/plugins/data/common/search/aggs/utils/calculate_auto_time_expression.ts b/src/plugins/data/common/search/aggs/utils/calculate_auto_time_expression.ts index 622e8101f34ab..3637ded44c50a 100644 --- a/src/plugins/data/common/search/aggs/utils/calculate_auto_time_expression.ts +++ b/src/plugins/data/common/search/aggs/utils/calculate_auto_time_expression.ts @@ -21,6 +21,7 @@ import { UI_SETTINGS } from '../../../../common/constants'; import { TimeRange } from '../../../../common/query'; import { TimeBuckets } from '../buckets/lib/time_buckets'; import { toAbsoluteDates } from './date_interval_utils'; +import { autoInterval } from '../buckets/_interval_options'; export function getCalculateAutoTimeExpression(getConfig: (key: string) => any) { return function calculateAutoTimeExpression(range: TimeRange) { @@ -36,7 +37,7 @@ export function getCalculateAutoTimeExpression(getConfig: (key: string) => any) 'dateFormat:scaled': getConfig('dateFormat:scaled'), }); - buckets.setInterval('auto'); + buckets.setInterval(autoInterval); buckets.setBounds({ min: moment(dates.from), max: moment(dates.to), diff --git a/src/plugins/data/common/search/es_search/index.ts b/src/plugins/data/common/search/es_search/index.ts index 7bc9cada8f0ee..d8f7b5091eb8f 100644 --- a/src/plugins/data/common/search/es_search/index.ts +++ b/src/plugins/data/common/search/es_search/index.ts @@ -17,9 +17,4 @@ * under the License. */ -export { - ISearchRequestParams, - IEsSearchRequest, - IEsSearchResponse, - ES_SEARCH_STRATEGY, -} from './types'; +export * from './types'; diff --git a/src/plugins/data/common/search/es_search/types.ts b/src/plugins/data/common/search/es_search/types.ts index 3184fbe341705..81124c1e095f7 100644 --- a/src/plugins/data/common/search/es_search/types.ts +++ b/src/plugins/data/common/search/es_search/types.ts @@ -22,6 +22,17 @@ import { IKibanaSearchRequest, IKibanaSearchResponse } from '../types'; export const ES_SEARCH_STRATEGY = 'es'; +export interface ISearchOptions { + /** + * An `AbortSignal` that allows the caller of `search` to abort a search request. + */ + abortSignal?: AbortSignal; + /** + * Use this option to force using a specific server side search strategy. Leave empty to use the default strategy. 
+ */ + strategy?: string; +} + export type ISearchRequestParams> = { trackTotalHits?: boolean; } & Search; @@ -42,3 +53,6 @@ export interface IEsSearchResponse extends IKibanaSearchResponse { isPartial?: boolean; rawResponse: SearchResponse; } + +export const isEsResponse = (response: any): response is IEsSearchResponse => + response && response.rawResponse; diff --git a/src/plugins/data/common/search/index.ts b/src/plugins/data/common/search/index.ts index d8184551b7f3d..061974d860246 100644 --- a/src/plugins/data/common/search/index.ts +++ b/src/plugins/data/common/search/index.ts @@ -22,10 +22,4 @@ export * from './es_search'; export * from './expressions'; export * from './tabify'; export * from './types'; - -export { - IEsSearchRequest, - IEsSearchResponse, - ES_SEARCH_STRATEGY, - ISearchRequestParams, -} from './es_search'; +export * from './es_search'; diff --git a/src/plugins/data/public/actions/filters/create_filters_from_value_click.test.ts b/src/plugins/data/public/actions/filters/create_filters_from_value_click.test.ts index a3b9b0b344823..2ad20c3807819 100644 --- a/src/plugins/data/public/actions/filters/create_filters_from_value_click.test.ts +++ b/src/plugins/data/public/actions/filters/create_filters_from_value_click.test.ts @@ -30,11 +30,7 @@ import { ValueClickContext } from '../../../../embeddable/public'; const mockField = { name: 'bytes', - indexPattern: { - id: 'logstash-*', - }, filterable: true, - format: new fieldFormats.BytesFormat({}, (() => {}) as FieldFormatsGetConfigFn), }; describe('createFiltersFromValueClick', () => { @@ -81,6 +77,8 @@ describe('createFiltersFromValueClick', () => { getByName: () => mockField, filter: () => [mockField], }, + getFormatterForField: () => + new fieldFormats.BytesFormat({}, (() => {}) as FieldFormatsGetConfigFn), }), } as unknown) as IndexPatternsContract); }); diff --git a/src/plugins/data/public/field_formats/field_formats_registry.stub.ts b/src/plugins/data/public/field_formats/field_formats_registry.stub.ts new file mode 100644 index 0000000000000..e8741ca41036b --- /dev/null +++ b/src/plugins/data/public/field_formats/field_formats_registry.stub.ts @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
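With FetchOptions removed, abort handling and strategy selection ride on the shared ISearchOptions introduced above. A small sketch of the calling convention (the import path is abbreviated, and the `data.search.search` entry point mentioned in the comment is the plugin's start-contract method):

import { ES_SEARCH_STRATEGY, IEsSearchRequest, ISearchOptions } from 'src/plugins/data/common';

const controller = new AbortController();

const request: IEsSearchRequest = {
  params: { index: 'logstash-*', body: { query: { match_all: {} } } },
};

const options: ISearchOptions = {
  abortSignal: controller.signal, // replaces FetchOptions.abortSignal / the old { signal } shape
  strategy: ES_SEARCH_STRATEGY,   // optional; leave out to use the default strategy
};

// e.g. data.search.search(request, options) from the plugin start contract;
// cancelling later aborts the underlying request:
controller.abort();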
+ */ + +import { CoreSetup } from 'src/core/public'; +import { deserializeFieldFormat } from './utils/deserialize'; +import { baseFormattersPublic } from './constants'; +import { DataPublicPluginStart, fieldFormats } from '..'; + +export const getFieldFormatsRegistry = (core: CoreSetup) => { + const fieldFormatsRegistry = new fieldFormats.FieldFormatsRegistry(); + const getConfig = core.uiSettings.get.bind(core.uiSettings); + + fieldFormatsRegistry.init(getConfig, {}, baseFormattersPublic); + + fieldFormatsRegistry.deserialize = deserializeFieldFormat.bind( + fieldFormatsRegistry as DataPublicPluginStart['fieldFormats'] + ); + + return fieldFormatsRegistry; +}; diff --git a/src/plugins/data/public/index.ts b/src/plugins/data/public/index.ts index 27b16c57ffecf..553ee6bde5f2d 100644 --- a/src/plugins/data/public/index.ts +++ b/src/plugins/data/public/index.ts @@ -262,7 +262,7 @@ export { UI_SETTINGS, TypeMeta as IndexPatternTypeMeta, AggregationRestrictions as IndexPatternAggRestrictions, - FieldList, + fieldList, } from '../common'; /* @@ -342,7 +342,6 @@ export { ES_SEARCH_STRATEGY, EsQuerySortValue, extractSearchSourceReferences, - FetchOptions, getEsPreference, getSearchParamsFromRequest, IEsSearchRequest, @@ -352,7 +351,6 @@ export { injectSearchSourceReferences, ISearch, ISearchGeneric, - ISearchOptions, ISearchSource, parseSearchSourceJSON, RequestTimeoutError, @@ -367,6 +365,8 @@ export { EsRawResponseExpressionTypeDefinition, } from './search'; +export { ISearchOptions } from '../common'; + // Search namespace export const search = { aggs: { @@ -429,6 +429,7 @@ export { TimeHistory, TimefilterContract, TimeHistoryContract, + QueryStateChange, } from './query'; export { diff --git a/src/plugins/data/public/index_patterns/index_patterns/index_patterns_api_client.test.mock.ts b/src/plugins/data/public/index_patterns/index_patterns/index_patterns_api_client.test.mock.ts index 51f4fc7ce94b9..f1f5f8737b389 100644 --- a/src/plugins/data/public/index_patterns/index_patterns/index_patterns_api_client.test.mock.ts +++ b/src/plugins/data/public/index_patterns/index_patterns/index_patterns_api_client.test.mock.ts @@ -17,10 +17,8 @@ * under the License. 
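The new registry stub above saves tests from running full plugin setup. A sketch of how a spec might consume it (import paths are abbreviated and the deserialize call shape is illustrative):

import { coreMock } from 'src/core/public/mocks';
import { getFieldFormatsRegistry } from './field_formats_registry.stub';

const fieldFormatsRegistry = getFieldFormatsRegistry(coreMock.createSetup());

// The stub wires a uiSettings-backed getConfig plus the public base formatters
// and binds `deserialize`, so serialized formats can round-trip inside tests.
const bytesFormat = fieldFormatsRegistry.deserialize({ id: 'bytes' });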
*/ -import { setup } from 'test_utils/http_test_setup'; +import { setup } from '../../../../../core/test_helpers/http_test_setup'; export const { http } = setup((injectedMetadata) => { injectedMetadata.getBasePath.mockReturnValue('/hola/daro/'); }); - -jest.doMock('ui/new_platform', () => ({ npSetup: { core: { http } } })); diff --git a/src/plugins/data/public/plugin.ts b/src/plugins/data/public/plugin.ts index 3bc19a578a417..3b18e0fbed537 100644 --- a/src/plugins/data/public/plugin.ts +++ b/src/plugins/data/public/plugin.ts @@ -19,13 +19,7 @@ import './index.scss'; -import { - PluginInitializerContext, - CoreSetup, - CoreStart, - Plugin, - PackageInfo, -} from 'src/core/public'; +import { PluginInitializerContext, CoreSetup, CoreStart, Plugin } from 'src/core/public'; import { ConfigSchema } from '../config'; import { Storage, IStorageWrapper, createStartServicesGetter } from '../../kibana_utils/public'; import { @@ -100,7 +94,6 @@ export class DataPublicPlugin private readonly fieldFormatsService: FieldFormatsService; private readonly queryService: QueryService; private readonly storage: IStorageWrapper; - private readonly packageInfo: PackageInfo; constructor(initializerContext: PluginInitializerContext) { this.searchService = new SearchService(); @@ -108,7 +101,6 @@ export class DataPublicPlugin this.fieldFormatsService = new FieldFormatsService(); this.autocomplete = new AutocompleteService(initializerContext); this.storage = new Storage(window.localStorage); - this.packageInfo = initializerContext.env.packageInfo; } public setup( @@ -145,7 +137,6 @@ export class DataPublicPlugin const searchService = this.searchService.setup(core, { usageCollection, - packageInfo: this.packageInfo, expressions, }); diff --git a/src/plugins/data/public/public.api.md b/src/plugins/data/public/public.api.md index ba40dece25df9..27d4ea49f9eb1 100644 --- a/src/plugins/data/public/public.api.md +++ b/src/plugins/data/public/public.api.md @@ -26,11 +26,12 @@ import { EuiGlobalToastListToast } from '@elastic/eui'; import { ExclusiveUnion } from '@elastic/eui'; import { ExpressionAstFunction } from 'src/plugins/expressions/common'; import { ExpressionsSetup } from 'src/plugins/expressions/public'; -import { FetchOptions as FetchOptions_2 } from 'src/plugins/data/public'; import { History } from 'history'; import { Href } from 'history'; +import { HttpStart } from 'src/core/public'; import { IconType } from '@elastic/eui'; import { InjectedIntl } from '@kbn/i18n/react'; +import { ISearchOptions as ISearchOptions_2 } from 'src/plugins/data/public'; import { ISearchSource as ISearchSource_2 } from 'src/plugins/data/public'; import { IStorageWrapper } from 'src/plugins/kibana_utils/public'; import { IUiSettingsClient } from 'src/core/public'; @@ -486,16 +487,6 @@ export const extractSearchSourceReferences: (state: SearchSourceFields) => [Sear indexRefName?: string; }, SavedObjectReference[]]; -// Warning: (ae-missing-release-tag) "FetchOptions" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) -// -// @public (undocumented) -export interface FetchOptions { - // (undocumented) - abortSignal?: AbortSignal; - // (undocumented) - searchStrategyId?: string; -} - // Warning: (ae-missing-release-tag) "FieldFormat" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public (undocumented) @@ -604,46 +595,11 @@ export type FieldFormatsContentType = 'html' | 'text'; // @public (undocumented) export type 
FieldFormatsGetConfigFn = GetConfigFn; -// Warning: (ae-missing-release-tag) "FieldList" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) +// Warning: (ae-forgotten-export) The symbol "FieldSpec" needs to be exported by the entry point index.d.ts +// Warning: (ae-missing-release-tag) "fieldList" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public (undocumented) -export class FieldList extends Array implements IIndexPatternFieldList { - // Warning: (ae-forgotten-export) The symbol "FieldSpec" needs to be exported by the entry point index.d.ts - // Warning: (ae-forgotten-export) The symbol "OnNotification" needs to be exported by the entry point index.d.ts - constructor(indexPattern: IndexPattern, specs?: FieldSpec[], shortDotsEnable?: boolean, onNotification?: OnNotification); - // (undocumented) - readonly add: (field: FieldSpec) => void; - // (undocumented) - readonly getAll: () => IndexPatternField[]; - // (undocumented) - readonly getByName: (name: IndexPatternField['name']) => IndexPatternField | undefined; - // (undocumented) - readonly getByType: (type: IndexPatternField['type']) => any[]; - // (undocumented) - readonly remove: (field: IFieldType) => void; - // (undocumented) - readonly removeAll: () => void; - // (undocumented) - readonly replaceAll: (specs: FieldSpec[]) => void; - // (undocumented) - readonly toSpec: () => { - count: number; - script: string | undefined; - lang: string | undefined; - conflictDescriptions: Record | undefined; - name: string; - type: string; - esTypes: string[] | undefined; - scripted: boolean; - searchable: boolean; - aggregatable: boolean; - readFromDocValues: boolean; - subType: import("../types").IFieldSubType | undefined; - format: any; - }[]; - // (undocumented) - readonly update: (field: FieldSpec) => void; -} +export const fieldList: (specs?: FieldSpec[], shortDotsEnable?: boolean) => IIndexPatternFieldList; // @public (undocumented) export interface FieldMappingSpec { @@ -737,7 +693,6 @@ export const getKbnTypeNames: () => string[]; // // @public (undocumented) export function getSearchParamsFromRequest(searchRequest: SearchRequest, dependencies: { - esShardTimeout: number; getConfig: GetConfigFn; }): ISearchRequestParams; @@ -868,7 +823,9 @@ export interface IFieldType { // (undocumented) subType?: IFieldSubType; // (undocumented) - toSpec?: () => FieldSpec; + toSpec?: (options?: { + getFormatterForField?: IndexPattern['getFormatterForField']; + }) => FieldSpec; // (undocumented) type: string; // (undocumented) @@ -919,6 +876,10 @@ export interface IIndexPatternFieldList extends Array { // (undocumented) replaceAll(specs: FieldSpec[]): void; // (undocumented) + toSpec(options?: { + getFormatterForField?: IndexPattern['getFormatterForField']; + }): FieldSpec[]; + // (undocumented) update(field: FieldSpec): void; } @@ -950,7 +911,7 @@ export type IMetricAggType = MetricAggType; // @public (undocumented) export class IndexPattern implements IIndexPattern { // Warning: (ae-forgotten-export) The symbol "IndexPatternDeps" needs to be exported by the entry point index.d.ts - constructor(id: string | undefined, { savedObjectsClient, apiClient, patternCache, fieldFormats, onNotification, onError, shortDotsEnable, metaFields, }: IndexPatternDeps); + constructor(id: string | undefined, { savedObjectsClient, apiClient, patternCache, fieldFormats, indexPatternsService, onNotification, onError, shortDotsEnable, metaFields, }: IndexPatternDeps); 
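As the API report above shows, the FieldList class is replaced by a fieldList factory that no longer needs an owning index pattern or notification callback. A hedged sketch of the replacement call (the field spec contents are illustrative and may not list every property FieldSpec requires):

import { fieldList } from 'src/plugins/data/public';

// Previously: new FieldList(indexPattern, specs, shortDotsEnable, onNotification)
const fields = fieldList(
  [
    {
      name: 'bytes',
      type: 'number',
      searchable: true,
      aggregatable: true,
      readFromDocValues: true,
      scripted: false,
      count: 0,
    },
  ],
  false // shortDotsEnable
);

const bytesField = fields.getByName('bytes');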
// (undocumented) addScriptedField(name: string, script: string, fieldType: string | undefined, lang: string): Promise; // (undocumented) @@ -1024,6 +985,10 @@ export class IndexPattern implements IIndexPattern { // (undocumented) metaFields: string[]; // (undocumented) + originalBody: { + [key: string]: any; + }; + // (undocumented) popularizeField(fieldName: string, unit?: number): Promise; // (undocumented) prepBody(): { @@ -1039,9 +1004,7 @@ export class IndexPattern implements IIndexPattern { // (undocumented) refreshFields(): Promise; // (undocumented) - removeScriptedField(fieldName: string): Promise; - // (undocumented) - save(saveAttempts?: number): Promise; + removeScriptedField(fieldName: string): void; // Warning: (ae-forgotten-export) The symbol "SourceFilter" needs to be exported by the entry point index.d.ts // // (undocumented) @@ -1051,16 +1014,14 @@ export class IndexPattern implements IIndexPattern { // (undocumented) title: string; // (undocumented) - toJSON(): string | undefined; - // (undocumented) toSpec(): IndexPatternSpec; // (undocumented) - toString(): string; - // (undocumented) type: string | undefined; // (undocumented) typeMeta?: IndexPatternTypeMeta; - } + // (undocumented) + version: string | undefined; +} // Warning: (ae-missing-release-tag) "AggregationRestrictions" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // @@ -1100,7 +1061,7 @@ export interface IndexPatternAttributes { // // @public (undocumented) export class IndexPatternField implements IFieldType { - constructor(indexPattern: IndexPattern, spec: FieldSpec, displayName: string, onNotification: OnNotification); + constructor(spec: FieldSpec, displayName: string); // (undocumented) get aggregatable(): boolean; // (undocumented) @@ -1116,10 +1077,6 @@ export class IndexPatternField implements IFieldType { // (undocumented) get filterable(): boolean; // (undocumented) - get format(): FieldFormat; - // (undocumented) - readonly indexPattern: IndexPattern; - // (undocumented) get lang(): string | undefined; set lang(lang: string | undefined); // (undocumented) @@ -1155,7 +1112,9 @@ export class IndexPatternField implements IFieldType { subType: import("../types").IFieldSubType | undefined; }; // (undocumented) - toSpec(): { + toSpec({ getFormatterForField, }?: { + getFormatterForField?: IndexPattern['getFormatterForField']; + }): { count: number; script: string | undefined; lang: string | undefined; @@ -1168,7 +1127,10 @@ export class IndexPatternField implements IFieldType { aggregatable: boolean; readFromDocValues: boolean; subType: import("../types").IFieldSubType | undefined; - format: any; + format: { + id: any; + params: any; + } | undefined; }; // (undocumented) get type(): string; @@ -1265,9 +1227,7 @@ export type ISearchGeneric = >; +export const QueryStringInput: React.FC>; // @public (undocumented) export type QuerySuggestion = QuerySuggestionBasic | QuerySuggestionField; @@ -1721,8 +1692,8 @@ export const search: { // Warning: (ae-missing-release-tag) "SearchBar" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public (undocumented) -export const SearchBar: React.ComponentClass, "query" | "isLoading" | "filters" | "onRefresh" | "onRefreshChange" | "refreshInterval" | "indexPatterns" | "dataTestSubj" | "customSubmitButton" | "screenTitle" | "showQueryBar" | "showQueryInput" | "showFilterBar" | "showDatePicker" | "showAutoRefreshOnly" | "isRefreshPaused" | "dateRangeFrom" | 
"dateRangeTo" | "showSaveQuery" | "savedQuery" | "onQueryChange" | "onQuerySubmit" | "onSaved" | "onSavedQueryUpdated" | "onClearSavedQuery" | "indicateNoData" | "timeHistory" | "onFiltersUpdated">, any> & { - WrappedComponent: React.ComponentType & ReactIntl.InjectedIntlProps>; +export const SearchBar: React.ComponentClass, "query" | "isLoading" | "filters" | "onRefresh" | "onRefreshChange" | "refreshInterval" | "indexPatterns" | "dataTestSubj" | "timeHistory" | "customSubmitButton" | "screenTitle" | "showQueryBar" | "showQueryInput" | "showFilterBar" | "showDatePicker" | "showAutoRefreshOnly" | "isRefreshPaused" | "dateRangeFrom" | "dateRangeTo" | "showSaveQuery" | "savedQuery" | "onQueryChange" | "onQuerySubmit" | "onSaved" | "onSavedQueryUpdated" | "onClearSavedQuery" | "indicateNoData" | "onFiltersUpdated">, any> & { + WrappedComponent: React.ComponentType & ReactIntl.InjectedIntlProps>; }; // Warning: (ae-forgotten-export) The symbol "SearchBarOwnProps" needs to be exported by the entry point index.d.ts @@ -1754,7 +1725,7 @@ export interface SearchError { // // @public (undocumented) export class SearchInterceptor { - constructor(deps: SearchInterceptorDeps, requestTimeout?: number | undefined); + constructor(deps: SearchInterceptorDeps); // @internal protected abortController: AbortController; // @internal (undocumented) @@ -1768,13 +1739,14 @@ export class SearchInterceptor { protected longRunningToast?: Toast; // @internal protected pendingCount$: BehaviorSubject; - // (undocumented) - protected readonly requestTimeout?: number | undefined; - // (undocumented) + // @internal (undocumented) protected runSearch(request: IEsSearchRequest, signal: AbortSignal, strategy?: string): Observable; search(request: IEsSearchRequest, options?: ISearchOptions): Observable; - // (undocumented) - protected setupTimers(options?: ISearchOptions): { + // @internal (undocumented) + protected setupAbortSignal({ abortSignal, timeout, }: { + abortSignal?: AbortSignal; + timeout?: number; + }): { combinedSignal: AbortSignal; cleanup: () => void; }; @@ -1945,6 +1917,7 @@ export const UI_SETTINGS: { readonly COURIER_MAX_CONCURRENT_SHARD_REQUESTS: "courier:maxConcurrentShardRequests"; readonly COURIER_BATCH_SEARCHES: "courier:batchSearches"; readonly SEARCH_INCLUDE_FROZEN: "search:includeFrozen"; + readonly SEARCH_TIMEOUT: "search:timeout"; readonly HISTOGRAM_BAR_TARGET: "histogram:barTarget"; readonly HISTOGRAM_MAX_BARS: "histogram:maxBars"; readonly HISTORY_LIMIT: "history:limit"; diff --git a/src/plugins/data/public/search/expressions/create_filter.test.ts b/src/plugins/data/public/search/expressions/create_filter.test.ts index 7968c80628531..7cc336a1c20e9 100644 --- a/src/plugins/data/public/search/expressions/create_filter.test.ts +++ b/src/plugins/data/public/search/expressions/create_filter.test.ts @@ -52,6 +52,7 @@ describe('createFilter', () => { getByName: () => field, filter: () => [field], }, + getFormatterForField: () => new BytesFormat({}, (() => {}) as FieldFormatsGetConfigFn), } as any; return new AggConfigs( diff --git a/src/plugins/data/public/search/expressions/esdsl.ts b/src/plugins/data/public/search/expressions/esdsl.ts index d7b897ace29b4..2efb21671b5b7 100644 --- a/src/plugins/data/public/search/expressions/esdsl.ts +++ b/src/plugins/data/public/search/expressions/esdsl.ts @@ -140,7 +140,7 @@ export const esdsl = (): EsdslExpressionFunctionDefinition => ({ body: dsl, }, }, - { signal: abortSignal } + { abortSignal } ) .toPromise(); diff --git 
a/src/plugins/data/public/search/fetch/get_search_params.test.ts b/src/plugins/data/public/search/fetch/get_search_params.test.ts index 1ecb879b1602d..5e83e1f57bb6d 100644 --- a/src/plugins/data/public/search/fetch/get_search_params.test.ts +++ b/src/plugins/data/public/search/fetch/get_search_params.test.ts @@ -25,44 +25,12 @@ function getConfigStub(config: any = {}): GetConfigFn { } describe('getSearchParams', () => { - test('includes rest_total_hits_as_int', () => { - const config = getConfigStub(); + test('includes custom preference', () => { + const config = getConfigStub({ + [UI_SETTINGS.COURIER_SET_REQUEST_PREFERENCE]: 'custom', + [UI_SETTINGS.COURIER_CUSTOM_REQUEST_PREFERENCE]: 'aaa', + }); const searchParams = getSearchParams(config); - expect(searchParams.rest_total_hits_as_int).toBe(true); - }); - - test('includes ignore_unavailable', () => { - const config = getConfigStub(); - const searchParams = getSearchParams(config); - expect(searchParams.ignore_unavailable).toBe(true); - }); - - test('includes ignore_throttled according to search:includeFrozen', () => { - let config = getConfigStub({ [UI_SETTINGS.SEARCH_INCLUDE_FROZEN]: true }); - let searchParams = getSearchParams(config); - expect(searchParams.ignore_throttled).toBe(false); - - config = getConfigStub({ [UI_SETTINGS.SEARCH_INCLUDE_FROZEN]: false }); - searchParams = getSearchParams(config); - expect(searchParams.ignore_throttled).toBe(true); - }); - - test('includes max_concurrent_shard_requests according to courier:maxConcurrentShardRequests', () => { - let config = getConfigStub({ [UI_SETTINGS.COURIER_MAX_CONCURRENT_SHARD_REQUESTS]: 0 }); - let searchParams = getSearchParams(config); - expect(searchParams.max_concurrent_shard_requests).toBe(undefined); - - config = getConfigStub({ [UI_SETTINGS.COURIER_MAX_CONCURRENT_SHARD_REQUESTS]: 5 }); - searchParams = getSearchParams(config); - expect(searchParams.max_concurrent_shard_requests).toBe(5); - }); - - test('includes timeout according to esShardTimeout if greater than 0', () => { - const config = getConfigStub(); - let searchParams = getSearchParams(config, 0); - expect(searchParams.timeout).toBe(undefined); - - searchParams = getSearchParams(config, 100); - expect(searchParams.timeout).toBe('100ms'); + expect(searchParams.preference).toBe('aaa'); }); }); diff --git a/src/plugins/data/public/search/fetch/get_search_params.ts b/src/plugins/data/public/search/fetch/get_search_params.ts index 5e0395189f647..ed87c4813951c 100644 --- a/src/plugins/data/public/search/fetch/get_search_params.ts +++ b/src/plugins/data/public/search/fetch/get_search_params.ts @@ -22,26 +22,12 @@ import { SearchRequest } from './types'; const sessionId = Date.now(); -export function getSearchParams(getConfig: GetConfigFn, esShardTimeout: number = 0) { +export function getSearchParams(getConfig: GetConfigFn) { return { - rest_total_hits_as_int: true, - ignore_unavailable: true, - ignore_throttled: getIgnoreThrottled(getConfig), - max_concurrent_shard_requests: getMaxConcurrentShardRequests(getConfig), preference: getPreference(getConfig), - timeout: getTimeout(esShardTimeout), }; } -export function getIgnoreThrottled(getConfig: GetConfigFn) { - return !getConfig(UI_SETTINGS.SEARCH_INCLUDE_FROZEN); -} - -export function getMaxConcurrentShardRequests(getConfig: GetConfigFn) { - const maxConcurrentShardRequests = getConfig(UI_SETTINGS.COURIER_MAX_CONCURRENT_SHARD_REQUESTS); - return maxConcurrentShardRequests > 0 ? 
maxConcurrentShardRequests : undefined; -} - export function getPreference(getConfig: GetConfigFn) { const setRequestPreference = getConfig(UI_SETTINGS.COURIER_SET_REQUEST_PREFERENCE); if (setRequestPreference === 'sessionId') return sessionId; @@ -50,19 +36,15 @@ export function getPreference(getConfig: GetConfigFn) { : undefined; } -export function getTimeout(esShardTimeout: number) { - return esShardTimeout > 0 ? `${esShardTimeout}ms` : undefined; -} - /** @public */ // TODO: Could provide this on runtime contract with dependencies // already wired up. export function getSearchParamsFromRequest( searchRequest: SearchRequest, - dependencies: { esShardTimeout: number; getConfig: GetConfigFn } + dependencies: { getConfig: GetConfigFn } ): ISearchRequestParams { - const { esShardTimeout, getConfig } = dependencies; - const searchParams = getSearchParams(getConfig, esShardTimeout); + const { getConfig } = dependencies; + const searchParams = getSearchParams(getConfig); return { index: searchRequest.index.title || searchRequest.index, diff --git a/src/plugins/data/public/search/fetch/index.ts b/src/plugins/data/public/search/fetch/index.ts index 79cdad1897f9c..4b8511edfc26f 100644 --- a/src/plugins/data/public/search/fetch/index.ts +++ b/src/plugins/data/public/search/fetch/index.ts @@ -18,14 +18,7 @@ */ export * from './types'; -export { - getSearchParams, - getSearchParamsFromRequest, - getPreference, - getTimeout, - getIgnoreThrottled, - getMaxConcurrentShardRequests, -} from './get_search_params'; +export { getSearchParams, getSearchParamsFromRequest, getPreference } from './get_search_params'; export { RequestFailure } from './request_error'; export { handleResponse } from './handle_response'; diff --git a/src/plugins/data/public/search/fetch/types.ts b/src/plugins/data/public/search/fetch/types.ts index 670c4f731971a..224a597766909 100644 --- a/src/plugins/data/public/search/fetch/types.ts +++ b/src/plugins/data/public/search/fetch/types.ts @@ -17,8 +17,9 @@ * under the License. 
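getSearchParams above now derives only the preference value on the client. A minimal sketch mirroring the remaining test (relative import paths assume the same directory layout as the test file):

import { UI_SETTINGS } from '../../../common';
import { getSearchParams } from './get_search_params';

const settings: Record<string, any> = {
  [UI_SETTINGS.COURIER_SET_REQUEST_PREFERENCE]: 'custom',
  [UI_SETTINGS.COURIER_CUSTOM_REQUEST_PREFERENCE]: 'aaa',
};
const getConfig = (key: string) => settings[key];

// ignore_throttled, max_concurrent_shard_requests, rest_total_hits_as_int and the
// shard timeout are no longer attached on the client side.
getSearchParams(getConfig); // => { preference: 'aaa' }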
*/ +import { HttpStart } from 'src/core/public'; +import { BehaviorSubject } from 'rxjs'; import { GetConfigFn } from '../../../common'; -import { ISearchStartLegacy } from '../types'; /** * @internal @@ -29,15 +30,10 @@ import { ISearchStartLegacy } from '../types'; */ export type SearchRequest = Record; -export interface FetchOptions { - abortSignal?: AbortSignal; - searchStrategyId?: string; -} - export interface FetchHandlers { - legacySearchService: ISearchStartLegacy; config: { get: GetConfigFn }; - esShardTimeout: number; + http: HttpStart; + loadingCount$: BehaviorSubject; } export interface SearchError { diff --git a/src/plugins/data/public/search/index.ts b/src/plugins/data/public/search/index.ts index 14eff13b378ee..a6a1736ac91da 100644 --- a/src/plugins/data/public/search/index.ts +++ b/src/plugins/data/public/search/index.ts @@ -19,14 +19,7 @@ export * from './expressions'; -export { - ISearch, - ISearchOptions, - ISearchGeneric, - ISearchSetup, - ISearchStart, - SearchEnhancements, -} from './types'; +export { ISearch, ISearchGeneric, ISearchSetup, ISearchStart, SearchEnhancements } from './types'; export { IEsSearchResponse, IEsSearchRequest, ES_SEARCH_STRATEGY } from '../../common/search'; @@ -34,7 +27,7 @@ export { getEsPreference } from './es_search'; export { IKibanaSearchResponse, IKibanaSearchRequest } from '../../common/search'; -export { SearchError, FetchOptions, getSearchParamsFromRequest, SearchRequest } from './fetch'; +export { SearchError, getSearchParamsFromRequest, SearchRequest } from './fetch'; export { ISearchSource, diff --git a/src/plugins/data/public/search/legacy/call_client.test.ts b/src/plugins/data/public/search/legacy/call_client.test.ts index a3c4e720b4cab..943a02d22088d 100644 --- a/src/plugins/data/public/search/legacy/call_client.test.ts +++ b/src/plugins/data/public/search/legacy/call_client.test.ts @@ -17,11 +17,13 @@ * under the License. 
*/ +import { coreMock } from '../../../../../core/public/mocks'; import { callClient } from './call_client'; import { SearchStrategySearchParams } from './types'; import { defaultSearchStrategy } from './default_search_strategy'; import { FetchHandlers } from '../fetch'; import { handleResponse } from '../fetch/handle_response'; +import { BehaviorSubject } from 'rxjs'; const mockAbortFn = jest.fn(); jest.mock('../fetch/handle_response', () => ({ @@ -54,7 +56,12 @@ describe('callClient', () => { test('Passes the additional arguments it is given to the search strategy', () => { const searchRequests = [{ _searchStrategyId: 0 }]; - const args = { legacySearchService: {}, config: {}, esShardTimeout: 0 } as FetchHandlers; + const args = { + http: coreMock.createStart().http, + legacySearchService: {}, + config: { get: jest.fn() }, + loadingCount$: new BehaviorSubject(0), + } as FetchHandlers; callClient(searchRequests, [], args); diff --git a/src/plugins/data/public/search/legacy/call_client.ts b/src/plugins/data/public/search/legacy/call_client.ts index 3dcf11f72a742..d66796b9427a1 100644 --- a/src/plugins/data/public/search/legacy/call_client.ts +++ b/src/plugins/data/public/search/legacy/call_client.ts @@ -18,21 +18,22 @@ */ import { SearchResponse } from 'elasticsearch'; -import { FetchOptions, FetchHandlers, handleResponse } from '../fetch'; +import { ISearchOptions } from 'src/plugins/data/common'; +import { FetchHandlers, handleResponse } from '../fetch'; import { defaultSearchStrategy } from './default_search_strategy'; import { SearchRequest } from '../index'; export function callClient( searchRequests: SearchRequest[], - requestsOptions: FetchOptions[] = [], + requestsOptions: ISearchOptions[] = [], fetchHandlers: FetchHandlers ) { // Correlate the options with the request that they're associated with const requestOptionEntries: Array<[ SearchRequest, - FetchOptions + ISearchOptions ]> = searchRequests.map((request, i) => [request, requestsOptions[i]]); - const requestOptionsMap = new Map(requestOptionEntries); + const requestOptionsMap = new Map(requestOptionEntries); const requestResponseMap = new Map>>(); const { searching, abort } = defaultSearchStrategy.search({ diff --git a/src/plugins/data/public/search/legacy/default_search_strategy.test.ts b/src/plugins/data/public/search/legacy/default_search_strategy.test.ts index 4148055c1eb58..e74ab49131430 100644 --- a/src/plugins/data/public/search/legacy/default_search_strategy.test.ts +++ b/src/plugins/data/public/search/legacy/default_search_strategy.test.ts @@ -17,44 +17,26 @@ * under the License. 
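callClient above now accepts ISearchOptions[] and pairs each entry with its request by position before handing everything to the default strategy. A sketch of that calling convention (the fetchHandlers value is assumed to be built as in the updated tests):

import { ISearchOptions } from 'src/plugins/data/common';
import { callClient } from './call_client';
import { FetchHandlers } from '../fetch';
import { SearchRequest } from '../index';

declare const fetchHandlers: FetchHandlers; // http/config/loadingCount$, built as in the updated tests

const searchRequests: SearchRequest[] = [{ index: { title: 'foo' } }, { index: { title: 'bar' } }];

// Options are positional: requestsOptions[i] is paired with searchRequests[i]
// through the Map that callClient builds internally.
const requestsOptions: ISearchOptions[] = [{ strategy: 'es' }, {}];

callClient(searchRequests, requestsOptions, fetchHandlers);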
*/ -import { IUiSettingsClient } from 'kibana/public'; +import { HttpStart } from 'src/core/public'; +import { coreMock } from '../../../../../core/public/mocks'; import { defaultSearchStrategy } from './default_search_strategy'; -import { searchServiceMock } from '../mocks'; import { SearchStrategySearchParams } from './types'; -import { UI_SETTINGS } from '../../../common'; +import { BehaviorSubject } from 'rxjs'; const { search } = defaultSearchStrategy; -function getConfigStub(config: any = {}) { - return { - get: (key) => config[key], - } as IUiSettingsClient; -} - -const msearchMockResponse: any = Promise.resolve([]); -msearchMockResponse.abort = jest.fn(); -const msearchMock = jest.fn().mockReturnValue(msearchMockResponse); - -const searchMockResponse: any = Promise.resolve([]); -searchMockResponse.abort = jest.fn(); -const searchMock = jest.fn().mockReturnValue(searchMockResponse); +const msearchMock = jest.fn().mockResolvedValue({ body: { responses: [] } }); describe('defaultSearchStrategy', function () { describe('search', function () { - let searchArgs: MockedKeys>; - let es: any; + let searchArgs: MockedKeys; + let http: jest.Mocked; beforeEach(() => { - msearchMockResponse.abort.mockClear(); msearchMock.mockClear(); - searchMockResponse.abort.mockClear(); - searchMock.mockClear(); - - const searchService = searchServiceMock.createStartContract(); - searchService.aggs.calculateAutoTimeExpression = jest.fn().mockReturnValue('1d'); - searchService.__LEGACY.esClient.search = searchMock; - searchService.__LEGACY.esClient.msearch = msearchMock; + http = coreMock.createStart().http; + http.post.mockResolvedValue(msearchMock); searchArgs = { searchRequests: [ @@ -62,49 +44,27 @@ describe('defaultSearchStrategy', function () { index: { title: 'foo' }, }, ], - esShardTimeout: 0, - legacySearchService: searchService.__LEGACY, + http, + config: { + get: jest.fn(), + }, + loadingCount$: new BehaviorSubject(0) as any, }; - - es = searchArgs.legacySearchService.esClient; - }); - - test('does not send max_concurrent_shard_requests by default', async () => { - const config = getConfigStub({ [UI_SETTINGS.COURIER_BATCH_SEARCHES]: true }); - await search({ ...searchArgs, config }); - expect(es.msearch.mock.calls[0][0].max_concurrent_shard_requests).toBe(undefined); - }); - - test('allows configuration of max_concurrent_shard_requests', async () => { - const config = getConfigStub({ - [UI_SETTINGS.COURIER_BATCH_SEARCHES]: true, - [UI_SETTINGS.COURIER_MAX_CONCURRENT_SHARD_REQUESTS]: 42, - }); - await search({ ...searchArgs, config }); - expect(es.msearch.mock.calls[0][0].max_concurrent_shard_requests).toBe(42); - }); - - test('should set rest_total_hits_as_int to true on a request', async () => { - const config = getConfigStub({ [UI_SETTINGS.COURIER_BATCH_SEARCHES]: true }); - await search({ ...searchArgs, config }); - expect(es.msearch.mock.calls[0][0]).toHaveProperty('rest_total_hits_as_int', true); - }); - - test('should set ignore_throttled=false when including frozen indices', async () => { - const config = getConfigStub({ - [UI_SETTINGS.COURIER_BATCH_SEARCHES]: true, - [UI_SETTINGS.SEARCH_INCLUDE_FROZEN]: true, - }); - await search({ ...searchArgs, config }); - expect(es.msearch.mock.calls[0][0]).toHaveProperty('ignore_throttled', false); }); - test('should properly call abort with msearch', () => { - const config = getConfigStub({ - [UI_SETTINGS.COURIER_BATCH_SEARCHES]: true, - }); - search({ ...searchArgs, config }).abort(); - expect(msearchMockResponse.abort).toHaveBeenCalled(); + 
test('calls http.post with the correct arguments', async () => { + await search({ ...searchArgs }); + expect(http.post.mock.calls).toMatchInlineSnapshot(` + Array [ + Array [ + "/internal/_msearch", + Object { + "body": "{\\"searches\\":[{\\"header\\":{\\"index\\":\\"foo\\"}}]}", + "signal": AbortSignal {}, + }, + ], + ] + `); }); }); }); diff --git a/src/plugins/data/public/search/legacy/default_search_strategy.ts b/src/plugins/data/public/search/legacy/default_search_strategy.ts index 6ccb0a01cf898..cbcd0da20207f 100644 --- a/src/plugins/data/public/search/legacy/default_search_strategy.ts +++ b/src/plugins/data/public/search/legacy/default_search_strategy.ts @@ -17,8 +17,7 @@ * under the License. */ -import { getPreference, getTimeout } from '../fetch'; -import { getMSearchParams } from './get_msearch_params'; +import { getPreference } from '../fetch'; import { SearchStrategyProvider, SearchStrategySearchParams } from './types'; // @deprecated @@ -30,34 +29,45 @@ export const defaultSearchStrategy: SearchStrategyProvider = { }, }; -function msearch({ - searchRequests, - legacySearchService, - config, - esShardTimeout, -}: SearchStrategySearchParams) { - const es = legacySearchService.esClient; - const inlineRequests = searchRequests.map(({ index, body, search_type: searchType }) => { - const inlineHeader = { - index: index.title || index, - search_type: searchType, - ignore_unavailable: true, - preference: getPreference(config.get), +function msearch({ searchRequests, config, http, loadingCount$ }: SearchStrategySearchParams) { + const requests = searchRequests.map(({ index, body }) => { + return { + header: { + index: index.title || index, + preference: getPreference(config.get), + }, + body, }; - const inlineBody = { - ...body, - timeout: getTimeout(esShardTimeout), - }; - return `${JSON.stringify(inlineHeader)}\n${JSON.stringify(inlineBody)}`; }); - const searching = es.msearch({ - ...getMSearchParams(config.get), - body: `${inlineRequests.join('\n')}\n`, - }); + const abortController = new AbortController(); + let resolved = false; + + // Start LoadingIndicator + loadingCount$.next(loadingCount$.getValue() + 1); + + const cleanup = () => { + if (!resolved) { + resolved = true; + // Decrement loading counter & cleanup BehaviorSubject + loadingCount$.next(loadingCount$.getValue() - 1); + loadingCount$.complete(); + } + }; + + const searching = http + .post('/internal/_msearch', { + body: JSON.stringify({ searches: requests }), + signal: abortController.signal, + }) + .then(({ body }) => body?.responses) + .finally(() => cleanup()); return { - searching: searching.then(({ responses }: any) => responses), - abort: searching.abort, + abort: () => { + abortController.abort(); + cleanup(); + }, + searching, }; } diff --git a/src/plugins/data/public/search/legacy/es_client/get_es_client.ts b/src/plugins/data/public/search/legacy/es_client/get_es_client.ts deleted file mode 100644 index 4367544ad9ff4..0000000000000 --- a/src/plugins/data/public/search/legacy/es_client/get_es_client.ts +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
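The legacy elasticsearch-browser client is gone: msearch now posts the batched searches to Kibana's own /internal/_msearch endpoint, tracks an AbortController, and drives the loading indicator through loadingCount$. A hedged sketch of exercising the strategy with its new dependencies (the http service and uiSettings getter are stand-ins):

import { BehaviorSubject } from 'rxjs';
import { HttpStart } from 'src/core/public';
import { defaultSearchStrategy } from './default_search_strategy';

declare const http: HttpStart; // core's start-contract http service in real code

const loadingCount$ = new BehaviorSubject(0);

const { searching, abort } = defaultSearchStrategy.search({
  searchRequests: [{ index: { title: 'logstash-*' }, body: { query: { match_all: {} } } }],
  config: { get: ((key: string) => undefined) as any }, // uiSettings-backed in real code
  http,
  loadingCount$,
});

// `searching` resolves with the responses array returned by /internal/_msearch;
// `abort()` cancels the POST and runs the same cleanup that decrements the
// loading counter feeding Kibana's loading indicator.
searching.then((responses) => responses);
abort();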
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// @ts-ignore -import { default as es } from 'elasticsearch-browser/elasticsearch'; -import { CoreStart, PackageInfo } from 'kibana/public'; -import { BehaviorSubject } from 'rxjs'; - -export function getEsClient({ - esRequestTimeout, - esApiVersion, - http, - packageVersion, -}: { - esRequestTimeout: number; - esApiVersion: string; - http: CoreStart['http']; - packageVersion: PackageInfo['version']; -}) { - // Use legacy es client for msearch. - const client = es.Client({ - host: getEsUrl(http, packageVersion), - log: 'info', - requestTimeout: esRequestTimeout, - apiVersion: esApiVersion, - }); - - const loadingCount$ = new BehaviorSubject(0); - http.addLoadingCountSource(loadingCount$); - - return { - search: wrapEsClientMethod(client, 'search', loadingCount$), - msearch: wrapEsClientMethod(client, 'msearch', loadingCount$), - create: wrapEsClientMethod(client, 'create', loadingCount$), - }; -} - -function wrapEsClientMethod(esClient: any, method: string, loadingCount$: BehaviorSubject) { - return (args: any) => { - // esClient returns a promise, with an additional abort handler - // To tap into the abort handling, we have to override that abort handler. - const customPromiseThingy = esClient[method](args); - const { abort } = customPromiseThingy; - let resolved = false; - - // Start LoadingIndicator - loadingCount$.next(loadingCount$.getValue() + 1); - - // Stop LoadingIndicator when user aborts - customPromiseThingy.abort = () => { - abort(); - if (!resolved) { - resolved = true; - loadingCount$.next(loadingCount$.getValue() - 1); - } - }; - - // Stop LoadingIndicator when promise finishes - customPromiseThingy.finally(() => { - resolved = true; - loadingCount$.next(loadingCount$.getValue() - 1); - }); - - return customPromiseThingy; - }; -} - -function getEsUrl(http: CoreStart['http'], packageVersion: PackageInfo['version']) { - const a = document.createElement('a'); - a.href = http.basePath.prepend('/elasticsearch'); - const protocolPort = /https/.test(a.protocol) ? 443 : 80; - const port = a.port || protocolPort; - return { - host: a.hostname, - port, - protocol: a.protocol, - pathname: a.pathname, - headers: { - 'kbn-version': packageVersion, - }, - }; -} diff --git a/src/plugins/data/public/search/legacy/es_client/index.ts b/src/plugins/data/public/search/legacy/es_client/index.ts deleted file mode 100644 index 78ac83af642d8..0000000000000 --- a/src/plugins/data/public/search/legacy/es_client/index.ts +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { getEsClient } from './get_es_client'; -export { LegacyApiCaller } from './types'; diff --git a/src/plugins/data/public/search/legacy/es_client/types.ts b/src/plugins/data/public/search/legacy/es_client/types.ts deleted file mode 100644 index 2d35188322a4e..0000000000000 --- a/src/plugins/data/public/search/legacy/es_client/types.ts +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { SearchResponse } from 'elasticsearch'; -import { SearchRequest } from '../../fetch'; - -export interface LegacyApiCaller { - search: (searchRequest: SearchRequest) => LegacyApiCallerResponse; - msearch: (searchRequest: SearchRequest) => LegacyApiCallerResponse; -} - -interface LegacyApiCallerResponse extends Promise> { - abort: () => void; -} diff --git a/src/plugins/data/public/search/legacy/fetch_soon.test.ts b/src/plugins/data/public/search/legacy/fetch_soon.test.ts index d7a85e65b475d..d38a41cf5ffbc 100644 --- a/src/plugins/data/public/search/legacy/fetch_soon.test.ts +++ b/src/plugins/data/public/search/legacy/fetch_soon.test.ts @@ -19,10 +19,10 @@ import { fetchSoon } from './fetch_soon'; import { callClient } from './call_client'; -import { FetchHandlers, FetchOptions } from '../fetch/types'; +import { FetchHandlers } from '../fetch/types'; import { SearchRequest } from '../index'; import { SearchResponse } from 'elasticsearch'; -import { GetConfigFn, UI_SETTINGS } from '../../../common'; +import { GetConfigFn, UI_SETTINGS, ISearchOptions } from '../../../common'; function getConfigStub(config: any = {}): GetConfigFn { return (key) => config[key]; @@ -102,7 +102,7 @@ describe('fetchSoon', () => { const options = [{ bar: 1 }, { bar: 2 }]; requests.forEach((request, i) => { - fetchSoon(request, options[i] as FetchOptions, { config } as FetchHandlers); + fetchSoon(request, options[i] as ISearchOptions, { config } as FetchHandlers); }); jest.advanceTimersByTime(50); diff --git a/src/plugins/data/public/search/legacy/fetch_soon.ts b/src/plugins/data/public/search/legacy/fetch_soon.ts index 16920a8a4dd97..37c3827bb7bba 100644 --- a/src/plugins/data/public/search/legacy/fetch_soon.ts +++ b/src/plugins/data/public/search/legacy/fetch_soon.ts @@ -19,9 +19,9 @@ import { SearchResponse } from 'elasticsearch'; import { callClient } from './call_client'; -import { FetchHandlers, FetchOptions 
} from '../fetch/types'; +import { FetchHandlers } from '../fetch/types'; import { SearchRequest } from '../index'; -import { UI_SETTINGS } from '../../../common'; +import { UI_SETTINGS, ISearchOptions } from '../../../common'; /** * This function introduces a slight delay in the request process to allow multiple requests to queue @@ -29,7 +29,7 @@ import { UI_SETTINGS } from '../../../common'; */ export async function fetchSoon( request: SearchRequest, - options: FetchOptions, + options: ISearchOptions, fetchHandlers: FetchHandlers ) { const msToDelay = fetchHandlers.config.get(UI_SETTINGS.COURIER_BATCH_SEARCHES) ? 50 : 0; @@ -51,7 +51,7 @@ function delay(fn: (...args: any) => T, ms: number): Promise { // The current batch/queue of requests to fetch let requestsToFetch: SearchRequest[] = []; -let requestOptions: FetchOptions[] = []; +let requestOptions: ISearchOptions[] = []; // The in-progress fetch (if there is one) let fetchInProgress: any = null; @@ -65,7 +65,7 @@ let fetchInProgress: any = null; */ async function delayedFetch( request: SearchRequest, - options: FetchOptions, + options: ISearchOptions, fetchHandlers: FetchHandlers, ms: number ): Promise> { diff --git a/src/plugins/data/public/search/legacy/get_msearch_params.test.ts b/src/plugins/data/public/search/legacy/get_msearch_params.test.ts deleted file mode 100644 index d3206950174c8..0000000000000 --- a/src/plugins/data/public/search/legacy/get_msearch_params.test.ts +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
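fetchSoon keeps its batching behaviour but now takes the shared ISearchOptions. A sketch of how the 50 ms batching window plays out (the handler object is assumed to be built as in the updated tests):

import { fetchSoon } from './fetch_soon';
import { FetchHandlers } from '../fetch/types';
import { ISearchOptions } from '../../../common';

declare const fetchHandlers: FetchHandlers; // config/http/loadingCount$, built as in the updated tests

const options: ISearchOptions = {};

// With courier:batchSearches enabled both calls land in the same 50 ms window and
// reach callClient as one batch; with it disabled the delay is 0 ms and each
// request is flushed on the next tick.
const first = fetchSoon({ index: 'logstash-*', body: {} }, options, fetchHandlers);
const second = fetchSoon({ index: 'metrics-*', body: {} }, options, fetchHandlers);

Promise.all([first, second]).then(([a, b]) => ({ a, b }));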
- */ - -import { getMSearchParams } from './get_msearch_params'; -import { GetConfigFn, UI_SETTINGS } from '../../../common'; - -function getConfigStub(config: any = {}): GetConfigFn { - return (key) => config[key]; -} - -describe('getMSearchParams', () => { - test('includes rest_total_hits_as_int', () => { - const config = getConfigStub(); - const msearchParams = getMSearchParams(config); - expect(msearchParams.rest_total_hits_as_int).toBe(true); - }); - - test('includes ignore_throttled according to search:includeFrozen', () => { - let config = getConfigStub({ [UI_SETTINGS.SEARCH_INCLUDE_FROZEN]: true }); - let msearchParams = getMSearchParams(config); - expect(msearchParams.ignore_throttled).toBe(false); - - config = getConfigStub({ [UI_SETTINGS.SEARCH_INCLUDE_FROZEN]: false }); - msearchParams = getMSearchParams(config); - expect(msearchParams.ignore_throttled).toBe(true); - }); - - test('includes max_concurrent_shard_requests according to courier:maxConcurrentShardRequests if greater than 0', () => { - let config = getConfigStub({ [UI_SETTINGS.COURIER_MAX_CONCURRENT_SHARD_REQUESTS]: 0 }); - let msearchParams = getMSearchParams(config); - expect(msearchParams.max_concurrent_shard_requests).toBe(undefined); - - config = getConfigStub({ [UI_SETTINGS.COURIER_MAX_CONCURRENT_SHARD_REQUESTS]: 5 }); - msearchParams = getMSearchParams(config); - expect(msearchParams.max_concurrent_shard_requests).toBe(5); - }); - - test('does not include other search params that are included in the msearch header or body', () => { - const config = getConfigStub({ - [UI_SETTINGS.SEARCH_INCLUDE_FROZEN]: false, - [UI_SETTINGS.COURIER_MAX_CONCURRENT_SHARD_REQUESTS]: 5, - }); - const msearchParams = getMSearchParams(config); - expect(msearchParams.hasOwnProperty('ignore_unavailable')).toBe(false); - expect(msearchParams.hasOwnProperty('preference')).toBe(false); - expect(msearchParams.hasOwnProperty('timeout')).toBe(false); - }); -}); diff --git a/src/plugins/data/public/search/legacy/get_msearch_params.ts b/src/plugins/data/public/search/legacy/get_msearch_params.ts deleted file mode 100644 index c4f77b25078cd..0000000000000 --- a/src/plugins/data/public/search/legacy/get_msearch_params.ts +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { GetConfigFn } from '../../../common'; -import { getIgnoreThrottled, getMaxConcurrentShardRequests } from '../fetch'; - -export function getMSearchParams(getConfig: GetConfigFn) { - return { - rest_total_hits_as_int: true, - ignore_throttled: getIgnoreThrottled(getConfig), - max_concurrent_shard_requests: getMaxConcurrentShardRequests(getConfig), - }; -} diff --git a/src/plugins/data/public/search/legacy/index.ts b/src/plugins/data/public/search/legacy/index.ts index e2ae72824f3f4..74e516f407e8c 100644 --- a/src/plugins/data/public/search/legacy/index.ts +++ b/src/plugins/data/public/search/legacy/index.ts @@ -18,4 +18,3 @@ */ export { fetchSoon } from './fetch_soon'; -export { getEsClient, LegacyApiCaller } from './es_client'; diff --git a/src/plugins/data/public/search/mocks.ts b/src/plugins/data/public/search/mocks.ts index 8ccf46fe7c97d..f4ed7d8b122b9 100644 --- a/src/plugins/data/public/search/mocks.ts +++ b/src/plugins/data/public/search/mocks.ts @@ -35,12 +35,6 @@ function createStartContract(): jest.Mocked { aggs: searchAggsStartMock(), search: jest.fn(), searchSource: searchSourceMock, - __LEGACY: { - esClient: { - search: jest.fn(), - msearch: jest.fn(), - }, - }, }; } diff --git a/src/plugins/data/public/search/search_interceptor.test.ts b/src/plugins/data/public/search/search_interceptor.test.ts index 2eded17bda88c..84db69a83a005 100644 --- a/src/plugins/data/public/search/search_interceptor.test.ts +++ b/src/plugins/data/public/search/search_interceptor.test.ts @@ -32,15 +32,12 @@ jest.useFakeTimers(); describe('SearchInterceptor', () => { beforeEach(() => { mockCoreSetup = coreMock.createSetup(); - searchInterceptor = new SearchInterceptor( - { - toasts: mockCoreSetup.notifications.toasts, - startServices: mockCoreSetup.getStartServices(), - uiSettings: mockCoreSetup.uiSettings, - http: mockCoreSetup.http, - }, - 1000 - ); + searchInterceptor = new SearchInterceptor({ + toasts: mockCoreSetup.notifications.toasts, + startServices: mockCoreSetup.getStartServices(), + uiSettings: mockCoreSetup.uiSettings, + http: mockCoreSetup.http, + }); }); describe('search', () => { @@ -112,7 +109,9 @@ describe('SearchInterceptor', () => { const mockRequest: IEsSearchRequest = { params: {}, }; - const response = searchInterceptor.search(mockRequest, { signal: abortController.signal }); + const response = searchInterceptor.search(mockRequest, { + abortSignal: abortController.signal, + }); const next = jest.fn(); const error = (e: any) => { @@ -131,7 +130,7 @@ describe('SearchInterceptor', () => { const mockRequest: IEsSearchRequest = { params: {}, }; - const response = searchInterceptor.search(mockRequest, { signal: abort.signal }); + const response = searchInterceptor.search(mockRequest, { abortSignal: abort.signal }); abort.abort(); const error = (e: any) => { diff --git a/src/plugins/data/public/search/search_interceptor.ts b/src/plugins/data/public/search/search_interceptor.ts index 30e509edd4987..0a6d60afed2f7 100644 --- a/src/plugins/data/public/search/search_interceptor.ts +++ b/src/plugins/data/public/search/search_interceptor.ts @@ -18,12 +18,25 @@ */ import { trimEnd } from 'lodash'; -import { BehaviorSubject, throwError, timer, Subscription, defer, from, Observable } from 'rxjs'; +import { + BehaviorSubject, + throwError, + timer, + Subscription, + defer, + from, + Observable, + NEVER, +} from 'rxjs'; import { finalize, filter } from 'rxjs/operators'; import { Toast, CoreStart, ToastsSetup, CoreSetup } from 'kibana/public'; import { getCombinedSignal, AbortError } 
from '../../common/utils'; -import { IEsSearchRequest, IEsSearchResponse, ES_SEARCH_STRATEGY } from '../../common/search'; -import { ISearchOptions } from './types'; +import { + IEsSearchRequest, + IEsSearchResponse, + ISearchOptions, + ES_SEARCH_STRATEGY, +} from '../../common/search'; import { getLongQueryNotification } from './long_query_notification'; import { SearchUsageCollector } from './collectors'; @@ -67,17 +80,10 @@ export class SearchInterceptor { */ protected application!: CoreStart['application']; - /** - * This class should be instantiated with a `requestTimeout` corresponding with how many ms after - * requests are initiated that they should automatically cancel. - * @param toasts The `core.notifications.toasts` service - * @param application The `core.application` service - * @param requestTimeout Usually config value `elasticsearch.requestTimeout` + /* + * @internal */ - constructor( - protected readonly deps: SearchInterceptorDeps, - protected readonly requestTimeout?: number - ) { + constructor(protected readonly deps: SearchInterceptorDeps) { this.deps.http.addLoadingCountSource(this.pendingCount$); this.deps.startServices.then(([coreStart]) => { @@ -90,7 +96,6 @@ export class SearchInterceptor { .pipe(filter((count) => count === 0)) .subscribe(this.hideToast); } - /** * Returns an `Observable` over the current number of pending searches. This could mean that one * of the search requests is still in flight, or that it has only received partial responses. @@ -99,6 +104,9 @@ export class SearchInterceptor { return this.pendingCount$.asObservable(); } + /** + * @internal + */ protected runSearch( request: IEsSearchRequest, signal: AbortSignal, @@ -128,11 +136,13 @@ export class SearchInterceptor { ): Observable { // Defer the following logic until `subscribe` is actually called return defer(() => { - if (options?.signal?.aborted) { + if (options?.abortSignal?.aborted) { return throwError(new AbortError()); } - const { combinedSignal, cleanup } = this.setupTimers(options); + const { combinedSignal, cleanup } = this.setupAbortSignal({ + abortSignal: options?.abortSignal, + }); this.pendingCount$.next(this.pendingCount$.getValue() + 1); return this.runSearch(request, combinedSignal, options?.strategy).pipe( @@ -144,11 +154,20 @@ export class SearchInterceptor { }); } - protected setupTimers(options?: ISearchOptions) { + /** + * @internal + */ + protected setupAbortSignal({ + abortSignal, + timeout, + }: { + abortSignal?: AbortSignal; + timeout?: number; + }) { // Schedule this request to automatically timeout after some interval const timeoutController = new AbortController(); const { signal: timeoutSignal } = timeoutController; - const timeout$ = timer(this.requestTimeout); + const timeout$ = timeout ? timer(timeout) : NEVER; const subscription = timeout$.subscribe(() => { timeoutController.abort(); }); @@ -164,7 +183,7 @@ export class SearchInterceptor { const signals = [ this.abortController.signal, timeoutSignal, - ...(options?.signal ? [options.signal] : []), + ...(abortSignal ? [abortSignal] : []), ]; const combinedSignal = getCombinedSignal(signals); diff --git a/src/plugins/data/public/search/search_service.ts b/src/plugins/data/public/search/search_service.ts index 9a30a15936fe5..f8f4acbe43dfd 100644 --- a/src/plugins/data/public/search/search_service.ts +++ b/src/plugins/data/public/search/search_service.ts @@ -17,11 +17,11 @@ * under the License. 
*/ -import { Plugin, CoreSetup, CoreStart, PackageInfo } from 'src/core/public'; +import { Plugin, CoreSetup, CoreStart } from 'src/core/public'; +import { BehaviorSubject } from 'rxjs'; import { ISearchSetup, ISearchStart, SearchEnhancements } from './types'; import { createSearchSource, SearchSource, SearchSourceDependencies } from './search_source'; -import { getEsClient, LegacyApiCaller } from './legacy'; import { AggsService, AggsStartDependencies } from './aggs'; import { IndexPatternsContract } from '../index_patterns/index_patterns'; import { ISearchInterceptor, SearchInterceptor } from './search_interceptor'; @@ -33,9 +33,8 @@ import { ExpressionsSetup } from '../../../expressions/public'; /** @internal */ export interface SearchServiceSetupDependencies { - packageInfo: PackageInfo; - usageCollection?: UsageCollectionSetup; expressions: ExpressionsSetup; + usageCollection?: UsageCollectionSetup; } /** @internal */ @@ -45,44 +44,27 @@ export interface SearchServiceStartDependencies { } export class SearchService implements Plugin { - private esClient?: LegacyApiCaller; private readonly aggsService = new AggsService(); private searchInterceptor!: ISearchInterceptor; private usageCollector?: SearchUsageCollector; public setup( { http, getStartServices, injectedMetadata, notifications, uiSettings }: CoreSetup, - { expressions, packageInfo, usageCollection }: SearchServiceSetupDependencies + { expressions, usageCollection }: SearchServiceSetupDependencies ): ISearchSetup { - const esApiVersion = injectedMetadata.getInjectedVar('esApiVersion') as string; - const esRequestTimeout = injectedMetadata.getInjectedVar('esRequestTimeout') as number; - const packageVersion = packageInfo.version; - this.usageCollector = createUsageCollector(getStartServices, usageCollection); - this.esClient = getEsClient({ - esRequestTimeout, - esApiVersion, - http, - packageVersion, - }); - /** * A global object that intercepts all searches and provides convenience methods for cancelling * all pending search requests, as well as getting the number of pending search requests. 
- * TODO: Make this modular so that apps can opt in/out of search collection, or even provide - * their own search collector instances */ - this.searchInterceptor = new SearchInterceptor( - { - toasts: notifications.toasts, - http, - uiSettings, - startServices: getStartServices(), - usageCollector: this.usageCollector!, - }, - esRequestTimeout - ); + this.searchInterceptor = new SearchInterceptor({ + toasts: notifications.toasts, + http, + uiSettings, + startServices: getStartServices(), + usageCollector: this.usageCollector!, + }); expressions.registerFunction(esdsl); expressions.registerType(esRawResponse); @@ -107,15 +89,14 @@ export class SearchService implements Plugin { return this.searchInterceptor.search(request, options); }) as ISearchGeneric; - const legacySearch = { - esClient: this.esClient!, - }; + const loadingCount$ = new BehaviorSubject(0); + http.addLoadingCountSource(loadingCount$); const searchSourceDependencies: SearchSourceDependencies = { getConfig: uiSettings.get.bind(uiSettings), - esShardTimeout: injectedMetadata.getInjectedVar('esShardTimeout') as number, search, - legacySearch, + http, + loadingCount$, }; return { @@ -127,7 +108,6 @@ export class SearchService implements Plugin { return new SearchSource({}, searchSourceDependencies); }, }, - __LEGACY: legacySearch, }; } diff --git a/src/plugins/data/public/search/search_source/create_search_source.test.ts b/src/plugins/data/public/search/search_source/create_search_source.test.ts index 56f6ca6c56270..bc1c7c06c8806 100644 --- a/src/plugins/data/public/search/search_source/create_search_source.test.ts +++ b/src/plugins/data/public/search/search_source/create_search_source.test.ts @@ -22,7 +22,8 @@ import { SearchSourceDependencies } from './search_source'; import { IIndexPattern } from '../../../common/index_patterns'; import { IndexPatternsContract } from '../../index_patterns/index_patterns'; import { Filter } from '../../../common/es_query/filters'; -import { dataPluginMock } from '../../mocks'; +import { coreMock } from '../../../../../core/public/mocks'; +import { BehaviorSubject } from 'rxjs'; describe('createSearchSource', () => { const indexPatternMock: IIndexPattern = {} as IIndexPattern; @@ -31,13 +32,11 @@ describe('createSearchSource', () => { let createSearchSource: ReturnType; beforeEach(() => { - const data = dataPluginMock.createStartContract(); - dependencies = { getConfig: jest.fn(), search: jest.fn(), - legacySearch: data.search.__LEGACY, - esShardTimeout: 30000, + http: coreMock.createStart().http, + loadingCount$: new BehaviorSubject(0), }; indexPatternContractMock = ({ diff --git a/src/plugins/data/public/search/search_source/mocks.ts b/src/plugins/data/public/search/search_source/mocks.ts index 4e1c35557ffa6..adf53bee33fe1 100644 --- a/src/plugins/data/public/search/search_source/mocks.ts +++ b/src/plugins/data/public/search/search_source/mocks.ts @@ -17,7 +17,8 @@ * under the License. 
*/ -import { uiSettingsServiceMock } from '../../../../../core/public/mocks'; +import { BehaviorSubject } from 'rxjs'; +import { httpServiceMock, uiSettingsServiceMock } from '../../../../../core/public/mocks'; import { ISearchSource, SearchSource } from './search_source'; import { SearchSourceFields } from './types'; @@ -52,12 +53,7 @@ export const searchSourceMock = { export const createSearchSourceMock = (fields?: SearchSourceFields) => new SearchSource(fields, { getConfig: uiSettingsServiceMock.createStartContract().get, - esShardTimeout: 30000, search: jest.fn(), - legacySearch: { - esClient: { - search: jest.fn(), - msearch: jest.fn(), - }, - }, + http: httpServiceMock.createStartContract(), + loadingCount$: new BehaviorSubject(0), }); diff --git a/src/plugins/data/public/search/search_source/search_source.test.ts b/src/plugins/data/public/search/search_source/search_source.test.ts index 2f0fa0765e25a..282a33e6d01f7 100644 --- a/src/plugins/data/public/search/search_source/search_source.test.ts +++ b/src/plugins/data/public/search/search_source/search_source.test.ts @@ -17,12 +17,12 @@ * under the License. */ -import { Observable } from 'rxjs'; +import { Observable, BehaviorSubject } from 'rxjs'; import { GetConfigFn } from 'src/plugins/data/common'; import { SearchSource, SearchSourceDependencies } from './search_source'; import { IndexPattern, SortDirection } from '../..'; import { fetchSoon } from '../legacy'; -import { dataPluginMock } from '../../../../data/public/mocks'; +import { coreMock } from '../../../../../core/public/mocks'; jest.mock('../legacy', () => ({ fetchSoon: jest.fn().mockResolvedValue({}), @@ -54,8 +54,6 @@ describe('SearchSource', () => { let searchSourceDependencies: SearchSourceDependencies; beforeEach(() => { - const data = dataPluginMock.createStartContract(); - mockSearchMethod = jest.fn(() => { return new Observable((subscriber) => { setTimeout(() => { @@ -70,8 +68,8 @@ describe('SearchSource', () => { searchSourceDependencies = { getConfig: jest.fn(), search: mockSearchMethod, - legacySearch: data.search.__LEGACY, - esShardTimeout: 30000, + http: coreMock.createStart().http, + loadingCount$: new BehaviorSubject(0), }; }); diff --git a/src/plugins/data/public/search/search_source/search_source.ts b/src/plugins/data/public/search/search_source/search_source.ts index d2e3370762059..68c7b663b3628 100644 --- a/src/plugins/data/public/search/search_source/search_source.ts +++ b/src/plugins/data/public/search/search_source/search_source.ts @@ -72,25 +72,31 @@ import { setWith } from '@elastic/safer-lodash-set'; import { uniqueId, uniq, extend, pick, difference, omit, isObject, keys, isFunction } from 'lodash'; import { map } from 'rxjs/operators'; +import { HttpStart } from 'src/core/public'; +import { BehaviorSubject } from 'rxjs'; import { normalizeSortRequest } from './normalize_sort_request'; import { filterDocvalueFields } from './filter_docvalue_fields'; import { fieldWildcardFilter } from '../../../../kibana_utils/common'; import { IIndexPattern, ISearchGeneric } from '../..'; import { SearchSourceOptions, SearchSourceFields } from './types'; import { - FetchOptions, RequestFailure, handleResponse, getSearchParamsFromRequest, SearchRequest, } from '../fetch'; -import { getEsQueryConfig, buildEsQuery, Filter, UI_SETTINGS } from '../../../common'; +import { + getEsQueryConfig, + buildEsQuery, + Filter, + UI_SETTINGS, + ISearchOptions, +} from '../../../common'; import { getHighlightRequest } from '../../../common/field_formats'; import { GetConfigFn } 
from '../../../common/types'; import { fetchSoon } from '../legacy'; import { extractReferences } from './extract_references'; -import { ISearchStartLegacy } from '../types'; /** @internal */ export const searchSourceRequiredUiSettings = [ @@ -111,8 +117,8 @@ export const searchSourceRequiredUiSettings = [ export interface SearchSourceDependencies { getConfig: GetConfigFn; search: ISearchGeneric; - legacySearch: ISearchStartLegacy; - esShardTimeout: number; + http: HttpStart; + loadingCount$: BehaviorSubject; } /** @public **/ @@ -121,7 +127,7 @@ export class SearchSource { private searchStrategyId?: string; private parent?: SearchSource; private requestStartHandlers: Array< - (searchSource: SearchSource, options?: FetchOptions) => Promise + (searchSource: SearchSource, options?: ISearchOptions) => Promise > = []; private inheritOptions: SearchSourceOptions = {}; public history: SearchRequest[] = []; @@ -225,15 +231,14 @@ export class SearchSource { * Run a search using the search service * @return {Observable>} */ - private fetch$(searchRequest: SearchRequest, signal?: AbortSignal) { - const { search, esShardTimeout, getConfig } = this.dependencies; + private fetch$(searchRequest: SearchRequest, options: ISearchOptions) { + const { search, getConfig } = this.dependencies; const params = getSearchParamsFromRequest(searchRequest, { - esShardTimeout, getConfig, }); - return search({ params, indexType: searchRequest.indexType }, { signal }).pipe( + return search({ params, indexType: searchRequest.indexType }, options).pipe( map(({ rawResponse }) => handleResponse(searchRequest, rawResponse)) ); } @@ -242,8 +247,8 @@ export class SearchSource { * Run a search using the search service * @return {Promise>} */ - private async legacyFetch(searchRequest: SearchRequest, options: FetchOptions) { - const { esShardTimeout, legacySearch, getConfig } = this.dependencies; + private async legacyFetch(searchRequest: SearchRequest, options: ISearchOptions) { + const { http, getConfig, loadingCount$ } = this.dependencies; return await fetchSoon( searchRequest, @@ -252,9 +257,9 @@ export class SearchSource { ...options, }, { - legacySearchService: legacySearch, + http, config: { get: getConfig }, - esShardTimeout, + loadingCount$, } ); } @@ -263,7 +268,7 @@ export class SearchSource { * * @async */ - async fetch(options: FetchOptions = {}) { + async fetch(options: ISearchOptions = {}) { const { getConfig } = this.dependencies; await this.requestIsStarting(options); @@ -274,7 +279,7 @@ export class SearchSource { if (getConfig(UI_SETTINGS.COURIER_BATCH_SEARCHES)) { response = await this.legacyFetch(searchRequest, options); } else { - response = await this.fetch$(searchRequest, options.abortSignal).toPromise(); + response = await this.fetch$(searchRequest, options).toPromise(); } // TODO: Remove casting when https://github.com/elastic/elasticsearch-js/issues/1287 is resolved @@ -291,7 +296,7 @@ export class SearchSource { * @return {undefined} */ onRequestStart( - handler: (searchSource: SearchSource, options?: FetchOptions) => Promise + handler: (searchSource: SearchSource, options?: ISearchOptions) => Promise ) { this.requestStartHandlers.push(handler); } @@ -318,7 +323,7 @@ export class SearchSource { * @param options * @return {Promise} */ - private requestIsStarting(options: FetchOptions = {}) { + private requestIsStarting(options: ISearchOptions = {}) { const handlers = [...this.requestStartHandlers]; // If callParentStartHandlers has been set to true, we also call all // handlers of parent search 
sources. diff --git a/src/plugins/data/public/search/types.ts b/src/plugins/data/public/search/types.ts index 55726e40f5a77..cec5c63294e96 100644 --- a/src/plugins/data/public/search/types.ts +++ b/src/plugins/data/public/search/types.ts @@ -19,7 +19,6 @@ import { Observable } from 'rxjs'; import { PackageInfo } from 'kibana/server'; -import { LegacyApiCaller } from './legacy/es_client'; import { ISearchInterceptor } from './search_interceptor'; import { ISearchSource, SearchSourceFields } from './search_source'; import { SearchUsageCollector } from './collectors'; @@ -29,15 +28,11 @@ import { IKibanaSearchResponse, IEsSearchRequest, IEsSearchResponse, + ISearchOptions, } from '../../common/search'; import { IndexPatternsContract } from '../../common/index_patterns/index_patterns'; import { UsageCollectionSetup } from '../../../usage_collection/public'; -export interface ISearchOptions { - signal?: AbortSignal; - strategy?: string; -} - export type ISearch = ( request: IKibanaSearchRequest, options?: ISearchOptions @@ -51,10 +46,6 @@ export type ISearchGeneric = < options?: ISearchOptions ) => Observable; -export interface ISearchStartLegacy { - esClient: LegacyApiCaller; -} - export interface SearchEnhancements { searchInterceptor: ISearchInterceptor; } @@ -78,11 +69,6 @@ export interface ISearchStart { create: (fields?: SearchSourceFields) => Promise; createEmpty: () => ISearchSource; }; - /** - * @deprecated - * @internal - */ - __LEGACY: ISearchStartLegacy; } export { SEARCH_EVENT_TYPE } from './collectors'; diff --git a/src/plugins/data/public/test_utils.ts b/src/plugins/data/public/test_utils.ts new file mode 100644 index 0000000000000..f04b20e96fa1d --- /dev/null +++ b/src/plugins/data/public/test_utils.ts @@ -0,0 +1,20 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export { getFieldFormatsRegistry } from './field_formats/field_formats_registry.stub'; diff --git a/src/plugins/data/public/ui/query_string_input/query_string_input.tsx b/src/plugins/data/public/ui/query_string_input/query_string_input.tsx index 86ee98b7af9d8..0bfac2a07a7eb 100644 --- a/src/plugins/data/public/ui/query_string_input/query_string_input.tsx +++ b/src/plugins/data/public/ui/query_string_input/query_string_input.tsx @@ -17,10 +17,9 @@ * under the License. 
*/ -import { Component } from 'react'; -import React from 'react'; +import React, { Component, RefObject, createRef } from 'react'; import { i18n } from '@kbn/i18n'; - +import classNames from 'classnames'; import { EuiTextArea, EuiOutsideClickDetector, @@ -30,6 +29,7 @@ import { EuiButton, EuiLink, htmlIdGenerator, + EuiPortal, } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; @@ -42,6 +42,7 @@ import { withKibana, KibanaReactContextValue, toMountPoint } from '../../../../k import { fetchIndexPatterns } from './fetch_index_patterns'; import { QueryLanguageSwitcher } from './language_switcher'; import { PersistedLog, getQueryLog, matchPairs, toUser, fromUser } from '../../query'; +import { SuggestionsListSize } from '../typeahead/suggestions_component'; import { SuggestionsComponent } from '..'; interface Props { @@ -60,6 +61,8 @@ interface Props { onChangeQueryInputFocus?: (isFocused: boolean) => void; onSubmit?: (query: Query) => void; dataTestSubj?: string; + size?: SuggestionsListSize; + className?: string; } interface State { @@ -70,6 +73,7 @@ interface State { selectionStart: number | null; selectionEnd: number | null; indexPatterns: IIndexPattern[]; + queryBarRect: DOMRect | undefined; } const KEY_CODES = { @@ -93,6 +97,7 @@ export class QueryStringInputUI extends Component { selectionStart: null, selectionEnd: null, indexPatterns: [], + queryBarRect: undefined, }; public inputRef: HTMLTextAreaElement | null = null; @@ -101,6 +106,7 @@ export class QueryStringInputUI extends Component { private abortController?: AbortController; private services = this.props.kibana.services; private componentIsUnmounting = false; + private queryBarInputDivRefInstance: RefObject = createRef(); private getQueryString = () => { return toUser(this.props.query.query); @@ -494,8 +500,13 @@ export class QueryStringInputUI extends Component { this.initPersistedLog(); this.fetchIndexPatterns().then(this.updateSuggestions); + this.handleListUpdate(); window.addEventListener('resize', this.handleAutoHeight); + window.addEventListener('scroll', this.handleListUpdate, { + passive: true, // for better performance as we won't call preventDefault + capture: true, // scroll events don't bubble, they must be captured instead + }); } public componentDidUpdate(prevProps: Props) { @@ -533,12 +544,19 @@ export class QueryStringInputUI extends Component { this.updateSuggestions.cancel(); this.componentIsUnmounting = true; window.removeEventListener('resize', this.handleAutoHeight); + window.removeEventListener('scroll', this.handleListUpdate); } + handleListUpdate = () => + this.setState({ + queryBarRect: this.queryBarInputDivRefInstance.current?.getBoundingClientRect(), + }); + handleAutoHeight = () => { if (this.inputRef !== null && document.activeElement === this.inputRef) { this.inputRef.style.setProperty('height', `${this.inputRef.scrollHeight}px`, 'important'); } + this.handleListUpdate(); }; handleRemoveHeight = () => { @@ -569,9 +587,12 @@ export class QueryStringInputUI extends Component { 'aria-owns': 'kbnTypeahead__items', }; const ariaCombobox = { ...isSuggestionsVisible, role: 'combobox' }; - + const className = classNames( + 'euiFormControlLayout euiFormControlLayout--group kbnQueryBar__wrap', + this.props.className + ); return ( -
+
{this.props.prepend}
{
{ {this.getQueryString()}
- - + + +
diff --git a/src/plugins/data/public/ui/typeahead/__snapshots__/suggestions_component.test.tsx.snap b/src/plugins/data/public/ui/typeahead/__snapshots__/suggestions_component.test.tsx.snap index 3b51c1db50d00..2fa7834872f6b 100644 --- a/src/plugins/data/public/ui/typeahead/__snapshots__/suggestions_component.test.tsx.snap +++ b/src/plugins/data/public/ui/typeahead/__snapshots__/suggestions_component.test.tsx.snap @@ -1,17 +1,21 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP exports[`SuggestionsComponent Passing the index should control which suggestion is selected 1`] = ` -
-
+ `; exports[`SuggestionsComponent Should display given suggestions if the show prop is true 1`] = ` -
-
+ `; diff --git a/src/plugins/data/public/ui/typeahead/_suggestion.scss b/src/plugins/data/public/ui/typeahead/_suggestion.scss index 81c05f1a8a78c..67ff17d017053 100644 --- a/src/plugins/data/public/ui/typeahead/_suggestion.scss +++ b/src/plugins/data/public/ui/typeahead/_suggestion.scss @@ -6,28 +6,36 @@ $kbnTypeaheadTypes: ( conjunction: $euiColorVis3, ); +.kbnTypeahead.kbnTypeahead--small { + max-height: 20vh; +} + +.kbnTypeahead__popover--top { + @include euiBottomShadowFlat; + border-top-left-radius: $euiBorderRadius; + border-top-right-radius: $euiBorderRadius; +} + +.kbnTypeahead__popover--bottom { + @include euiBottomShadow($adjustBorders: true); + border-bottom-left-radius: $euiBorderRadius; + border-bottom-right-radius: $euiBorderRadius; +} + .kbnTypeahead { - position: relative; + max-height: 60vh; .kbnTypeahead__popover { - @include euiBottomShadow($adjustBorders: true); + max-height: inherit; + @include euiScrollBar; border: 1px solid; border-color: $euiBorderColor; color: $euiTextColor; background-color: $euiColorEmptyShade; - position: absolute; - top: -2px; + position: relative; z-index: $euiZContentMenu; width: 100%; - border-bottom-left-radius: $euiBorderRadius; - border-bottom-right-radius: $euiBorderRadius; - - .kbnTypeahead__items { - @include euiScrollBar; - - max-height: 60vh; - overflow-y: auto; - } + overflow-y: auto; .kbnTypeahead__item { height: $euiSizeXL; diff --git a/src/plugins/data/public/ui/typeahead/constants.ts b/src/plugins/data/public/ui/typeahead/constants.ts new file mode 100644 index 0000000000000..08f9bd23e16f3 --- /dev/null +++ b/src/plugins/data/public/ui/typeahead/constants.ts @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +/** + * Minimum width in px to display suggestion description correctly + * @public + */ +export const SUGGESTIONS_LIST_REQUIRED_WIDTH = 600; + +/** + * Minimum bottom distance in px to display list of suggestions + * @public + */ +export const SUGGESTIONS_LIST_REQUIRED_BOTTOM_SPACE = 250; + +/** + * A distance in px to display suggestions list right under the query input without a gap + * @public + */ +export const SUGGESTIONS_LIST_REQUIRED_TOP_OFFSET = 2; diff --git a/src/plugins/data/public/ui/typeahead/suggestion_component.test.tsx b/src/plugins/data/public/ui/typeahead/suggestion_component.test.tsx index 9fe33b003527e..ba78bdd802601 100644 --- a/src/plugins/data/public/ui/typeahead/suggestion_component.test.tsx +++ b/src/plugins/data/public/ui/typeahead/suggestion_component.test.tsx @@ -44,6 +44,7 @@ describe('SuggestionComponent', () => { suggestion={mockSuggestion} innerRef={noop} ariaId={'suggestion-1'} + shouldDisplayDescription={true} /> ); @@ -59,6 +60,7 @@ describe('SuggestionComponent', () => { suggestion={mockSuggestion} innerRef={noop} ariaId={'suggestion-1'} + shouldDisplayDescription={true} /> ); @@ -79,6 +81,7 @@ describe('SuggestionComponent', () => { suggestion={mockSuggestion} innerRef={innerRefCallback} ariaId={'suggestion-1'} + shouldDisplayDescription={true} /> ); }); @@ -94,6 +97,7 @@ describe('SuggestionComponent', () => { suggestion={mockSuggestion} innerRef={noop} ariaId={'suggestion-1'} + shouldDisplayDescription={true} /> ); @@ -113,6 +117,7 @@ describe('SuggestionComponent', () => { suggestion={mockSuggestion} innerRef={noop} ariaId={'suggestion-1'} + shouldDisplayDescription={true} /> ); diff --git a/src/plugins/data/public/ui/typeahead/suggestion_component.tsx b/src/plugins/data/public/ui/typeahead/suggestion_component.tsx index b859428e6ed7e..724287b874bf7 100644 --- a/src/plugins/data/public/ui/typeahead/suggestion_component.tsx +++ b/src/plugins/data/public/ui/typeahead/suggestion_component.tsx @@ -46,6 +46,7 @@ interface Props { suggestion: QuerySuggestion; innerRef: (node: HTMLDivElement) => void; ariaId: string; + shouldDisplayDescription: boolean; } export function SuggestionComponent(props: Props) { @@ -72,7 +73,9 @@ export function SuggestionComponent(props: Props) {
{props.suggestion.text}
-
{props.suggestion.description}
+ {props.shouldDisplayDescription && ( +
{props.suggestion.description}
+ )}
); diff --git a/src/plugins/data/public/ui/typeahead/suggestions_component.test.tsx b/src/plugins/data/public/ui/typeahead/suggestions_component.test.tsx index 011a729c6a616..583940015c152 100644 --- a/src/plugins/data/public/ui/typeahead/suggestions_component.test.tsx +++ b/src/plugins/data/public/ui/typeahead/suggestions_component.test.tsx @@ -54,6 +54,7 @@ describe('SuggestionsComponent', () => { show={false} suggestions={mockSuggestions} loadMore={noop} + queryBarRect={{ top: 0 } as DOMRect} /> ); @@ -69,6 +70,7 @@ describe('SuggestionsComponent', () => { show={true} suggestions={[]} loadMore={noop} + queryBarRect={{ top: 0 } as DOMRect} /> ); @@ -84,6 +86,7 @@ describe('SuggestionsComponent', () => { show={true} suggestions={mockSuggestions} loadMore={noop} + queryBarRect={{ top: 0 } as DOMRect} /> ); @@ -100,6 +103,7 @@ describe('SuggestionsComponent', () => { show={true} suggestions={mockSuggestions} loadMore={noop} + queryBarRect={{ top: 0 } as DOMRect} /> ); @@ -116,6 +120,7 @@ describe('SuggestionsComponent', () => { show={true} suggestions={mockSuggestions} loadMore={noop} + queryBarRect={{ top: 0 } as DOMRect} /> ); @@ -134,6 +139,7 @@ describe('SuggestionsComponent', () => { show={true} suggestions={mockSuggestions} loadMore={noop} + queryBarRect={{ top: 0 } as DOMRect} /> ); diff --git a/src/plugins/data/public/ui/typeahead/suggestions_component.tsx b/src/plugins/data/public/ui/typeahead/suggestions_component.tsx index 77dd7dcec01ee..dc7c55374f1d5 100644 --- a/src/plugins/data/public/ui/typeahead/suggestions_component.tsx +++ b/src/plugins/data/public/ui/typeahead/suggestions_component.tsx @@ -19,8 +19,15 @@ import { isEmpty } from 'lodash'; import React, { Component } from 'react'; +import classNames from 'classnames'; +import styled from 'styled-components'; import { QuerySuggestion } from '../../autocomplete'; import { SuggestionComponent } from './suggestion_component'; +import { + SUGGESTIONS_LIST_REQUIRED_BOTTOM_SPACE, + SUGGESTIONS_LIST_REQUIRED_TOP_OFFSET, + SUGGESTIONS_LIST_REQUIRED_WIDTH, +} from './constants'; interface Props { index: number | null; @@ -29,18 +36,24 @@ interface Props { show: boolean; suggestions: QuerySuggestion[]; loadMore: () => void; + queryBarRect?: DOMRect; + size?: SuggestionsListSize; } +export type SuggestionsListSize = 's' | 'l'; + export class SuggestionsComponent extends Component { private childNodes: HTMLDivElement[] = []; private parentNode: HTMLDivElement | null = null; public render() { - if (!this.props.show || isEmpty(this.props.suggestions)) { + if (!this.props.queryBarRect || !this.props.show || isEmpty(this.props.suggestions)) { return null; } const suggestions = this.props.suggestions.map((suggestion, index) => { + const isDescriptionFittable = + this.props.queryBarRect!.width >= SUGGESTIONS_LIST_REQUIRED_WIDTH; return ( (this.childNodes[index] = node)} @@ -50,17 +63,38 @@ export class SuggestionsComponent extends Component { onMouseEnter={() => this.props.onMouseEnter(index)} ariaId={'suggestion-' + index} key={`${suggestion.type} - ${suggestion.text}`} + shouldDisplayDescription={isDescriptionFittable} /> ); }); + const documentHeight = document.documentElement.clientHeight || window.innerHeight; + const { queryBarRect } = this.props; + + // reflects if the suggestions list has enough space below to be opened down + const isSuggestionsListFittable = + documentHeight - (queryBarRect.top + queryBarRect.height) > + SUGGESTIONS_LIST_REQUIRED_BOTTOM_SPACE; + const verticalListPosition = isSuggestionsListFittable + ? 
`top: ${window.scrollY + queryBarRect.bottom - SUGGESTIONS_LIST_REQUIRED_TOP_OFFSET}px;` + : `bottom: ${documentHeight - (window.scrollY + queryBarRect.top)}px;`; + return ( -
-
-
+ +
+
(this.parentNode = node)} onScroll={this.handleScroll} @@ -69,7 +103,7 @@ export class SuggestionsComponent extends Component {
-
+ ); } @@ -116,3 +150,11 @@ export class SuggestionsComponent extends Component { } }; } + +const StyledSuggestionsListDiv = styled.div` + ${(props: { queryBarRect: DOMRect; verticalListPosition: string }) => ` + position: absolute; + left: ${props.queryBarRect.left}px; + width: ${props.queryBarRect.width}px; + ${props.verticalListPosition}`} +`; diff --git a/src/plugins/data/server/index.ts b/src/plugins/data/server/index.ts index c3b06992dba0e..03baff4910309 100644 --- a/src/plugins/data/server/index.ts +++ b/src/plugins/data/server/index.ts @@ -198,8 +198,10 @@ export { OptionedValueProp, ParsedInterval, // search + ISearchOptions, IEsSearchRequest, IEsSearchResponse, + ES_SEARCH_STRATEGY, // tabify TabbedAggColumn, TabbedAggRow, @@ -208,11 +210,13 @@ export { export { ISearchStrategy, - ISearchOptions, ISearchSetup, ISearchStart, + toSnakeCase, getDefaultSearchParams, + getShardTimeout, getTotalLoaded, + shimHitsTotal, usageProvider, SearchUsage, } from './search'; diff --git a/src/plugins/data/server/search/es_search/es_search_strategy.test.ts b/src/plugins/data/server/search/es_search/es_search_strategy.test.ts index c34c3a310814c..504ce728481f0 100644 --- a/src/plugins/data/server/search/es_search/es_search_strategy.test.ts +++ b/src/plugins/data/server/search/es_search/es_search_strategy.test.ts @@ -36,7 +36,14 @@ describe('ES search strategy', () => { }, }); const mockContext = { - core: { elasticsearch: { client: { asCurrentUser: { search: mockApiCaller } } } }, + core: { + uiSettings: { + client: { + get: () => {}, + }, + }, + elasticsearch: { client: { asCurrentUser: { search: mockApiCaller } } }, + }, }; const mockConfig$ = pluginInitializerContextConfigMock({}).legacy.globalConfig$; @@ -59,14 +66,13 @@ describe('ES search strategy', () => { expect(mockApiCaller).toBeCalled(); expect(mockApiCaller.mock.calls[0][0]).toEqual({ ...params, - timeout: '0ms', - ignoreUnavailable: true, - restTotalHitsAsInt: true, + ignore_unavailable: true, + track_total_hits: true, }); }); it('calls the API caller with overridden defaults', async () => { - const params = { index: 'logstash-*', ignoreUnavailable: false, timeout: '1000ms' }; + const params = { index: 'logstash-*', ignore_unavailable: false, timeout: '1000ms' }; const esSearch = await esSearchStrategyProvider(mockConfig$, mockLogger); await esSearch.search((mockContext as unknown) as RequestHandlerContext, { params }); @@ -74,7 +80,7 @@ describe('ES search strategy', () => { expect(mockApiCaller).toBeCalled(); expect(mockApiCaller.mock.calls[0][0]).toEqual({ ...params, - restTotalHitsAsInt: true, + track_total_hits: true, }); }); diff --git a/src/plugins/data/server/search/es_search/es_search_strategy.ts b/src/plugins/data/server/search/es_search/es_search_strategy.ts index eabbf3e3e2600..106f974ed3457 100644 --- a/src/plugins/data/server/search/es_search/es_search_strategy.ts +++ b/src/plugins/data/server/search/es_search/es_search_strategy.ts @@ -22,7 +22,8 @@ import { SearchResponse } from 'elasticsearch'; import { Observable } from 'rxjs'; import { ApiResponse } from '@elastic/elasticsearch'; import { SearchUsage } from '../collectors/usage'; -import { ISearchStrategy, getDefaultSearchParams, getTotalLoaded } from '..'; +import { toSnakeCase } from './to_snake_case'; +import { ISearchStrategy, getDefaultSearchParams, getTotalLoaded, getShardTimeout } from '..'; export const esSearchStrategyProvider = ( config$: Observable, @@ -33,7 +34,7 @@ export const esSearchStrategyProvider = ( search: async (context, request, options) => { 
logger.debug(`search ${request.params?.index}`); const config = await config$.pipe(first()).toPromise(); - const defaultParams = getDefaultSearchParams(config); + const uiSettingsClient = await context.core.uiSettings.client; // Only default index pattern type is supported here. // See data_enhanced for other type support. @@ -41,10 +42,14 @@ export const esSearchStrategyProvider = ( throw new Error(`Unsupported index pattern type ${request.indexType}`); } - const params = { + // ignoreThrottled is not supported in OSS + const { ignoreThrottled, ...defaultParams } = await getDefaultSearchParams(uiSettingsClient); + + const params = toSnakeCase({ ...defaultParams, + ...getShardTimeout(config), ...request.params, - }; + }); try { const esResponse = (await context.core.elasticsearch.client.asCurrentUser.search( diff --git a/src/plugins/data/server/search/es_search/get_default_search_params.ts b/src/plugins/data/server/search/es_search/get_default_search_params.ts index b2341ccc0f3c8..13607fce51670 100644 --- a/src/plugins/data/server/search/es_search/get_default_search_params.ts +++ b/src/plugins/data/server/search/es_search/get_default_search_params.ts @@ -17,12 +17,28 @@ * under the License. */ -import { SharedGlobalConfig } from '../../../../../core/server'; +import { SharedGlobalConfig, IUiSettingsClient } from '../../../../../core/server'; +import { UI_SETTINGS } from '../../../common/constants'; -export function getDefaultSearchParams(config: SharedGlobalConfig) { +export function getShardTimeout(config: SharedGlobalConfig) { + const timeout = config.elasticsearch.shardTimeout.asMilliseconds(); + return timeout + ? { + timeout: `${timeout}ms`, + } + : {}; +} + +export async function getDefaultSearchParams(uiSettingsClient: IUiSettingsClient) { + const ignoreThrottled = !(await uiSettingsClient.get(UI_SETTINGS.SEARCH_INCLUDE_FROZEN)); + const maxConcurrentShardRequests = await uiSettingsClient.get( + UI_SETTINGS.COURIER_MAX_CONCURRENT_SHARD_REQUESTS + ); return { - timeout: `${config.elasticsearch.shardTimeout.asMilliseconds()}ms`, + maxConcurrentShardRequests: + maxConcurrentShardRequests > 0 ? maxConcurrentShardRequests : undefined, + ignoreThrottled, ignoreUnavailable: true, // Don't fail if the index/indices don't exist - restTotalHitsAsInt: true, // Get the number of hits as an int rather than a range + trackTotalHits: true, }; } diff --git a/src/plugins/data/server/search/es_search/index.ts b/src/plugins/data/server/search/es_search/index.ts index 20006b70730d8..1bd17fc986168 100644 --- a/src/plugins/data/server/search/es_search/index.ts +++ b/src/plugins/data/server/search/es_search/index.ts @@ -17,7 +17,9 @@ * under the License. */ -export { ES_SEARCH_STRATEGY, IEsSearchRequest, IEsSearchResponse } from '../../../common/search'; export { esSearchStrategyProvider } from './es_search_strategy'; -export { getDefaultSearchParams } from './get_default_search_params'; +export * from './get_default_search_params'; export { getTotalLoaded } from './get_total_loaded'; +export * from './to_snake_case'; + +export { ES_SEARCH_STRATEGY, IEsSearchRequest, IEsSearchResponse } from '../../../common'; diff --git a/src/plugins/data/server/search/es_search/to_snake_case.ts b/src/plugins/data/server/search/es_search/to_snake_case.ts new file mode 100644 index 0000000000000..74f156274cbc6 --- /dev/null +++ b/src/plugins/data/server/search/es_search/to_snake_case.ts @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { mapKeys, snakeCase } from 'lodash'; + +export function toSnakeCase(obj: Record) { + return mapKeys(obj, (value, key) => snakeCase(key)); +} diff --git a/src/plugins/data/server/search/index.ts b/src/plugins/data/server/search/index.ts index 02c21c3254645..b671ed806510b 100644 --- a/src/plugins/data/server/search/index.ts +++ b/src/plugins/data/server/search/index.ts @@ -17,16 +17,12 @@ * under the License. */ -export { - ISearchStrategy, - ISearchOptions, - ISearchSetup, - ISearchStart, - SearchEnhancements, -} from './types'; +export { ISearchStrategy, ISearchSetup, ISearchStart, SearchEnhancements } from './types'; -export { getDefaultSearchParams, getTotalLoaded } from './es_search'; +export * from './es_search'; export { usageProvider, SearchUsage } from './collectors'; export * from './aggs'; + +export { shimHitsTotal } from './routes'; diff --git a/src/plugins/data/server/search/routes.test.ts b/src/plugins/data/server/search/routes.test.ts deleted file mode 100644 index d91aeee1fe818..0000000000000 --- a/src/plugins/data/server/search/routes.test.ts +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { CoreSetup, RequestHandlerContext } from '../../../../../src/core/server'; -import { coreMock, httpServerMock } from '../../../../../src/core/server/mocks'; -import { registerSearchRoute } from './routes'; -import { DataPluginStart } from '../plugin'; -import { dataPluginMock } from '../mocks'; - -describe('Search service', () => { - let mockDataStart: MockedKeys; - let mockCoreSetup: MockedKeys>; - - beforeEach(() => { - mockDataStart = dataPluginMock.createStartContract(); - mockCoreSetup = coreMock.createSetup({ pluginStartContract: mockDataStart }); - }); - - it('handler calls context.search.search with the given request and strategy', async () => { - const response = { id: 'yay' }; - mockDataStart.search.search.mockResolvedValue(response); - const mockContext = {}; - const mockBody = { id: undefined, params: {} }; - const mockParams = { strategy: 'foo' }; - const mockRequest = httpServerMock.createKibanaRequest({ - body: mockBody, - params: mockParams, - }); - const mockResponse = httpServerMock.createResponseFactory(); - - registerSearchRoute(mockCoreSetup); - - const mockRouter = mockCoreSetup.http.createRouter.mock.results[0].value; - const handler = mockRouter.post.mock.calls[0][1]; - await handler((mockContext as unknown) as RequestHandlerContext, mockRequest, mockResponse); - - expect(mockDataStart.search.search).toBeCalled(); - expect(mockDataStart.search.search.mock.calls[0][1]).toStrictEqual(mockBody); - expect(mockResponse.ok).toBeCalled(); - expect(mockResponse.ok.mock.calls[0][0]).toEqual({ - body: response, - }); - }); - - it('handler throws an error if the search throws an error', async () => { - mockDataStart.search.search.mockRejectedValue({ - message: 'oh no', - body: { - error: 'oops', - }, - }); - - const mockContext = {}; - const mockBody = { id: undefined, params: {} }; - const mockParams = { strategy: 'foo' }; - const mockRequest = httpServerMock.createKibanaRequest({ - body: mockBody, - params: mockParams, - }); - const mockResponse = httpServerMock.createResponseFactory(); - - registerSearchRoute(mockCoreSetup); - - const mockRouter = mockCoreSetup.http.createRouter.mock.results[0].value; - const handler = mockRouter.post.mock.calls[0][1]; - await handler((mockContext as unknown) as RequestHandlerContext, mockRequest, mockResponse); - - expect(mockDataStart.search.search).toBeCalled(); - expect(mockDataStart.search.search.mock.calls[0][1]).toStrictEqual(mockBody); - expect(mockResponse.customError).toBeCalled(); - const error: any = mockResponse.customError.mock.calls[0][0]; - expect(error.body.message).toBe('oh no'); - expect(error.body.attributes.error).toBe('oops'); - }); -}); diff --git a/src/plugins/data/server/search/routes.ts b/src/plugins/data/server/search/routes.ts deleted file mode 100644 index 3d813f745305f..0000000000000 --- a/src/plugins/data/server/search/routes.ts +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { schema } from '@kbn/config-schema'; -import { CoreSetup } from '../../../../core/server'; -import { getRequestAbortedSignal } from '../lib'; -import { DataPluginStart } from '../plugin'; - -export function registerSearchRoute(core: CoreSetup): void { - const router = core.http.createRouter(); - - router.post( - { - path: '/internal/search/{strategy}/{id?}', - validate: { - params: schema.object({ - strategy: schema.string(), - id: schema.maybe(schema.string()), - }), - - query: schema.object({}, { unknowns: 'allow' }), - - body: schema.object({}, { unknowns: 'allow' }), - }, - }, - async (context, request, res) => { - const searchRequest = request.body; - const { strategy, id } = request.params; - const signal = getRequestAbortedSignal(request.events.aborted$); - - const [, , selfStart] = await core.getStartServices(); - - try { - const response = await selfStart.search.search( - context, - { ...searchRequest, id }, - { - signal, - strategy, - } - ); - return res.ok({ body: response }); - } catch (err) { - return res.customError({ - statusCode: err.statusCode || 500, - body: { - message: err.message, - attributes: { - error: err.body?.error || err.message, - }, - }, - }); - } - } - ); - - router.delete( - { - path: '/internal/search/{strategy}/{id}', - validate: { - params: schema.object({ - strategy: schema.string(), - id: schema.string(), - }), - - query: schema.object({}, { unknowns: 'allow' }), - }, - }, - async (context, request, res) => { - const { strategy, id } = request.params; - - const [, , selfStart] = await core.getStartServices(); - const searchStrategy = selfStart.search.getSearchStrategy(strategy); - if (!searchStrategy.cancel) return res.ok(); - - try { - await searchStrategy.cancel(context, id); - return res.ok(); - } catch (err) { - return res.customError({ - statusCode: err.statusCode, - body: { - message: err.message, - attributes: { - error: err.body.error, - }, - }, - }); - } - } - ); -} diff --git a/src/plugins/data/server/search/routes/index.ts b/src/plugins/data/server/search/routes/index.ts new file mode 100644 index 0000000000000..a290f08f9b843 --- /dev/null +++ b/src/plugins/data/server/search/routes/index.ts @@ -0,0 +1,22 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +export * from './msearch'; +export * from './search'; +export * from './shim_hits_total'; diff --git a/src/plugins/data/server/search/routes/msearch.test.ts b/src/plugins/data/server/search/routes/msearch.test.ts new file mode 100644 index 0000000000000..3a7d67c31b8be --- /dev/null +++ b/src/plugins/data/server/search/routes/msearch.test.ts @@ -0,0 +1,150 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Observable } from 'rxjs'; + +import { + CoreSetup, + RequestHandlerContext, + SharedGlobalConfig, + StartServicesAccessor, +} from 'src/core/server'; +import { + coreMock, + httpServerMock, + pluginInitializerContextConfigMock, +} from '../../../../../../src/core/server/mocks'; +import { registerMsearchRoute, convertRequestBody } from './msearch'; +import { DataPluginStart } from '../../plugin'; +import { dataPluginMock } from '../../mocks'; + +describe('msearch route', () => { + let mockDataStart: MockedKeys; + let mockCoreSetup: MockedKeys>; + let getStartServices: jest.Mocked>; + let globalConfig$: Observable; + + beforeEach(() => { + mockDataStart = dataPluginMock.createStartContract(); + mockCoreSetup = coreMock.createSetup({ pluginStartContract: mockDataStart }); + getStartServices = mockCoreSetup.getStartServices; + globalConfig$ = pluginInitializerContextConfigMock({}).legacy.globalConfig$; + }); + + it('handler calls /_msearch with the given request', async () => { + const response = { id: 'yay', body: { responses: [{ hits: { total: 5 } }] } }; + const mockClient = { transport: { request: jest.fn().mockResolvedValue(response) } }; + const mockContext = { + core: { + elasticsearch: { client: { asCurrentUser: mockClient } }, + uiSettings: { client: { get: jest.fn() } }, + }, + }; + const mockBody = { searches: [{ header: {}, body: {} }] }; + const mockQuery = {}; + const mockRequest = httpServerMock.createKibanaRequest({ + body: mockBody, + query: mockQuery, + }); + const mockResponse = httpServerMock.createResponseFactory(); + + registerMsearchRoute(mockCoreSetup.http.createRouter(), { getStartServices, globalConfig$ }); + + const mockRouter = mockCoreSetup.http.createRouter.mock.results[0].value; + const handler = mockRouter.post.mock.calls[0][1]; + await handler((mockContext as unknown) as RequestHandlerContext, mockRequest, mockResponse); + + expect(mockClient.transport.request.mock.calls[0][0].method).toBe('GET'); + expect(mockClient.transport.request.mock.calls[0][0].path).toBe('/_msearch'); + expect(mockClient.transport.request.mock.calls[0][0].body).toEqual( + convertRequestBody(mockBody as any, {}) + ); + expect(mockResponse.ok).toBeCalled(); + expect(mockResponse.ok.mock.calls[0][0]).toEqual({ + body: response, + }); + }); + + it('handler throws an error if the search throws an error', async () => { + 
const response = { + message: 'oh no', + body: { + error: 'oops', + }, + }; + const mockClient = { + transport: { request: jest.fn().mockReturnValue(Promise.reject(response)) }, + }; + const mockContext = { + core: { + elasticsearch: { client: { asCurrentUser: mockClient } }, + uiSettings: { client: { get: jest.fn() } }, + }, + }; + const mockBody = { searches: [{ header: {}, body: {} }] }; + const mockQuery = {}; + const mockRequest = httpServerMock.createKibanaRequest({ + body: mockBody, + query: mockQuery, + }); + const mockResponse = httpServerMock.createResponseFactory(); + + registerMsearchRoute(mockCoreSetup.http.createRouter(), { getStartServices, globalConfig$ }); + + const mockRouter = mockCoreSetup.http.createRouter.mock.results[0].value; + const handler = mockRouter.post.mock.calls[0][1]; + await handler((mockContext as unknown) as RequestHandlerContext, mockRequest, mockResponse); + + expect(mockClient.transport.request).toBeCalled(); + expect(mockResponse.customError).toBeCalled(); + + const error: any = mockResponse.customError.mock.calls[0][0]; + expect(error.body.message).toBe('oh no'); + expect(error.body.attributes.error).toBe('oops'); + }); + + describe('convertRequestBody', () => { + it('combines header & body into proper msearch request', () => { + const request = { + searches: [{ header: { index: 'foo', preference: 0 }, body: { test: true } }], + }; + expect(convertRequestBody(request, { timeout: '30000ms' })).toMatchInlineSnapshot(` + "{\\"ignore_unavailable\\":true,\\"index\\":\\"foo\\",\\"preference\\":0} + {\\"timeout\\":\\"30000ms\\",\\"test\\":true} + " + `); + }); + + it('handles multiple searches', () => { + const request = { + searches: [ + { header: { index: 'foo', preference: 0 }, body: { test: true } }, + { header: { index: 'bar', preference: 1 }, body: { hello: 'world' } }, + ], + }; + expect(convertRequestBody(request, { timeout: '30000ms' })).toMatchInlineSnapshot(` + "{\\"ignore_unavailable\\":true,\\"index\\":\\"foo\\",\\"preference\\":0} + {\\"timeout\\":\\"30000ms\\",\\"test\\":true} + {\\"ignore_unavailable\\":true,\\"index\\":\\"bar\\",\\"preference\\":1} + {\\"timeout\\":\\"30000ms\\",\\"hello\\":\\"world\\"} + " + `); + }); + }); +}); diff --git a/src/plugins/data/server/search/routes/msearch.ts b/src/plugins/data/server/search/routes/msearch.ts new file mode 100644 index 0000000000000..e1ddb06e4fb6f --- /dev/null +++ b/src/plugins/data/server/search/routes/msearch.ts @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { first } from 'rxjs/operators'; +import { schema } from '@kbn/config-schema'; + +import { SearchResponse } from 'elasticsearch'; +import { IRouter } from 'src/core/server'; +import { SearchRouteDependencies } from '../search_service'; +import { shimHitsTotal } from './shim_hits_total'; +import { getShardTimeout, getDefaultSearchParams, toSnakeCase } from '..'; + +interface MsearchHeaders { + index: string; + preference?: number | string; +} + +interface MsearchRequest { + header: MsearchHeaders; + body: any; +} + +interface RequestBody { + searches: MsearchRequest[]; +} + +/** @internal */ +export function convertRequestBody( + requestBody: RequestBody, + { timeout }: { timeout?: string } +): string { + return requestBody.searches.reduce((req, curr) => { + const header = JSON.stringify({ + ignore_unavailable: true, + ...curr.header, + }); + const body = JSON.stringify({ + timeout, + ...curr.body, + }); + return `${req}${header}\n${body}\n`; + }, ''); +} + +/** + * The msearch route takes in an array of searches, each consisting of header + * and body json, and reformts them into a single request for the _msearch API. + * + * The reason for taking requests in a different format is so that we can + * inject values into each request without needing to manually parse each one. + * + * This route is internal and _should not be used_ in any new areas of code. + * It only exists as a means of removing remaining dependencies on the + * legacy ES client. + * + * @deprecated + */ +export function registerMsearchRoute(router: IRouter, deps: SearchRouteDependencies): void { + router.post( + { + path: '/internal/_msearch', + validate: { + body: schema.object({ + searches: schema.arrayOf( + schema.object({ + header: schema.object( + { + index: schema.string(), + preference: schema.maybe(schema.oneOf([schema.number(), schema.string()])), + }, + { unknowns: 'allow' } + ), + body: schema.object({}, { unknowns: 'allow' }), + }) + ), + }), + }, + }, + async (context, request, res) => { + const client = context.core.elasticsearch.client.asCurrentUser; + + // get shardTimeout + const config = await deps.globalConfig$.pipe(first()).toPromise(); + const timeout = getShardTimeout(config); + + const body = convertRequestBody(request.body, timeout); + + // trackTotalHits is not supported by msearch + const { trackTotalHits, ...defaultParams } = await getDefaultSearchParams( + context.core.uiSettings.client + ); + + try { + const response = await client.transport.request({ + method: 'GET', + path: '/_msearch', + body, + querystring: toSnakeCase(defaultParams), + }); + + return res.ok({ + body: { + ...response, + body: { + responses: response.body.responses?.map((r: SearchResponse) => shimHitsTotal(r)), + }, + }, + }); + } catch (err) { + return res.customError({ + statusCode: err.statusCode || 500, + body: { + message: err.message, + attributes: { + error: err.body?.error || err.message, + }, + }, + }); + } + } + ); +} diff --git a/src/plugins/data/server/search/routes/search.test.ts b/src/plugins/data/server/search/routes/search.test.ts new file mode 100644 index 0000000000000..d4404c318ab47 --- /dev/null +++ b/src/plugins/data/server/search/routes/search.test.ts @@ -0,0 +1,123 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Observable } from 'rxjs'; + +import { + CoreSetup, + RequestHandlerContext, + SharedGlobalConfig, + StartServicesAccessor, +} from 'src/core/server'; +import { + coreMock, + httpServerMock, + pluginInitializerContextConfigMock, +} from '../../../../../../src/core/server/mocks'; +import { registerSearchRoute } from './search'; +import { DataPluginStart } from '../../plugin'; +import { dataPluginMock } from '../../mocks'; + +describe('Search service', () => { + let mockDataStart: MockedKeys; + let mockCoreSetup: MockedKeys>; + let getStartServices: jest.Mocked>; + let globalConfig$: Observable; + + beforeEach(() => { + mockDataStart = dataPluginMock.createStartContract(); + mockCoreSetup = coreMock.createSetup({ pluginStartContract: mockDataStart }); + getStartServices = mockCoreSetup.getStartServices; + globalConfig$ = pluginInitializerContextConfigMock({}).legacy.globalConfig$; + }); + + it('handler calls context.search.search with the given request and strategy', async () => { + const response = { + id: 'yay', + rawResponse: { + took: 100, + timed_out: true, + _shards: { + total: 0, + successful: 0, + failed: 0, + skipped: 0, + }, + hits: { + total: 0, + max_score: 0, + hits: [], + }, + }, + }; + mockDataStart.search.search.mockResolvedValue(response); + const mockContext = {}; + const mockBody = { id: undefined, params: {} }; + const mockParams = { strategy: 'foo' }; + const mockRequest = httpServerMock.createKibanaRequest({ + body: mockBody, + params: mockParams, + }); + const mockResponse = httpServerMock.createResponseFactory(); + + registerSearchRoute(mockCoreSetup.http.createRouter(), { getStartServices, globalConfig$ }); + + const mockRouter = mockCoreSetup.http.createRouter.mock.results[0].value; + const handler = mockRouter.post.mock.calls[0][1]; + await handler((mockContext as unknown) as RequestHandlerContext, mockRequest, mockResponse); + + expect(mockDataStart.search.search).toBeCalled(); + expect(mockDataStart.search.search.mock.calls[0][1]).toStrictEqual(mockBody); + expect(mockResponse.ok).toBeCalled(); + expect(mockResponse.ok.mock.calls[0][0]).toEqual({ + body: response, + }); + }); + + it('handler throws an error if the search throws an error', async () => { + mockDataStart.search.search.mockRejectedValue({ + message: 'oh no', + body: { + error: 'oops', + }, + }); + + const mockContext = {}; + const mockBody = { id: undefined, params: {} }; + const mockParams = { strategy: 'foo' }; + const mockRequest = httpServerMock.createKibanaRequest({ + body: mockBody, + params: mockParams, + }); + const mockResponse = httpServerMock.createResponseFactory(); + + registerSearchRoute(mockCoreSetup.http.createRouter(), { getStartServices, globalConfig$ }); + + const mockRouter = mockCoreSetup.http.createRouter.mock.results[0].value; + const handler = mockRouter.post.mock.calls[0][1]; + await handler((mockContext as unknown) as RequestHandlerContext, mockRequest, mockResponse); + + 
expect(mockDataStart.search.search).toBeCalled(); + expect(mockDataStart.search.search.mock.calls[0][1]).toStrictEqual(mockBody); + expect(mockResponse.customError).toBeCalled(); + const error: any = mockResponse.customError.mock.calls[0][0]; + expect(error.body.message).toBe('oh no'); + expect(error.body.attributes.error).toBe('oops'); + }); +}); diff --git a/src/plugins/data/server/search/routes/search.ts b/src/plugins/data/server/search/routes/search.ts new file mode 100644 index 0000000000000..b5d5ec283767d --- /dev/null +++ b/src/plugins/data/server/search/routes/search.ts @@ -0,0 +1,121 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { schema } from '@kbn/config-schema'; +import { IRouter } from 'src/core/server'; +import { getRequestAbortedSignal } from '../../lib'; +import { SearchRouteDependencies } from '../search_service'; +import { shimHitsTotal } from './shim_hits_total'; +import { isEsResponse } from '../../../common'; + +export function registerSearchRoute( + router: IRouter, + { getStartServices }: SearchRouteDependencies +): void { + router.post( + { + path: '/internal/search/{strategy}/{id?}', + validate: { + params: schema.object({ + strategy: schema.string(), + id: schema.maybe(schema.string()), + }), + + query: schema.object({}, { unknowns: 'allow' }), + + body: schema.object({}, { unknowns: 'allow' }), + }, + }, + async (context, request, res) => { + const searchRequest = request.body; + const { strategy, id } = request.params; + const abortSignal = getRequestAbortedSignal(request.events.aborted$); + + const [, , selfStart] = await getStartServices(); + + try { + const response = await selfStart.search.search( + context, + { ...searchRequest, id }, + { + abortSignal, + strategy, + } + ); + + return res.ok({ + body: { + ...response, + ...(isEsResponse(response) + ? 
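+              // Only raw Elasticsearch responses need the hits.total shim; responses from other
+              // search strategies are returned to the client unchanged.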
{ + rawResponse: shimHitsTotal(response.rawResponse), + } + : {}), + }, + }); + } catch (err) { + return res.customError({ + statusCode: err.statusCode || 500, + body: { + message: err.message, + attributes: { + error: err.body?.error || err.message, + }, + }, + }); + } + } + ); + + router.delete( + { + path: '/internal/search/{strategy}/{id}', + validate: { + params: schema.object({ + strategy: schema.string(), + id: schema.string(), + }), + + query: schema.object({}, { unknowns: 'allow' }), + }, + }, + async (context, request, res) => { + const { strategy, id } = request.params; + + const [, , selfStart] = await getStartServices(); + const searchStrategy = selfStart.search.getSearchStrategy(strategy); + if (!searchStrategy.cancel) return res.ok(); + + try { + await searchStrategy.cancel(context, id); + return res.ok(); + } catch (err) { + return res.customError({ + statusCode: err.statusCode, + body: { + message: err.message, + attributes: { + error: err.body.error, + }, + }, + }); + } + } + ); +} diff --git a/src/plugins/data/server/search/routes/shim_hits_total.test.ts b/src/plugins/data/server/search/routes/shim_hits_total.test.ts new file mode 100644 index 0000000000000..0f24735386121 --- /dev/null +++ b/src/plugins/data/server/search/routes/shim_hits_total.test.ts @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { shimHitsTotal } from './shim_hits_total'; + +describe('shimHitsTotal', () => { + test('returns the total if it is already numeric', () => { + const result = shimHitsTotal({ + hits: { + total: 5, + }, + } as any); + expect(result).toEqual({ + hits: { + total: 5, + }, + }); + }); + + test('returns the total if it is inside `value`', () => { + const result = shimHitsTotal({ + hits: { + total: { + value: 5, + }, + }, + } as any); + expect(result).toEqual({ + hits: { + total: 5, + }, + }); + }); + + test('returns other properties from the response', () => { + const result = shimHitsTotal({ + _shards: {}, + hits: { + hits: [], + total: { + value: 5, + }, + }, + } as any); + expect(result).toEqual({ + _shards: {}, + hits: { + hits: [], + total: 5, + }, + }); + }); +}); diff --git a/src/plugins/data/server/search/routes/shim_hits_total.ts b/src/plugins/data/server/search/routes/shim_hits_total.ts new file mode 100644 index 0000000000000..5f95b21358978 --- /dev/null +++ b/src/plugins/data/server/search/routes/shim_hits_total.ts @@ -0,0 +1,33 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { SearchResponse } from 'elasticsearch'; + +/** + * Temporary workaround until https://github.com/elastic/kibana/issues/26356 is addressed. + * Since we are setting `track_total_hits` in the request, `hits.total` will be an object + * containing the `value`. + * + * @internal + */ +export function shimHitsTotal(response: SearchResponse) { + const total = (response.hits?.total as any)?.value ?? response.hits?.total; + const hits = { ...response.hits, total }; + return { ...response, hits }; +} diff --git a/src/plugins/data/server/search/search_service.ts b/src/plugins/data/server/search/search_service.ts index edc94961c79d8..e19d3dd8a5451 100644 --- a/src/plugins/data/server/search/search_service.ts +++ b/src/plugins/data/server/search/search_service.ts @@ -17,6 +17,7 @@ * under the License. */ +import { Observable } from 'rxjs'; import { CoreSetup, CoreStart, @@ -24,20 +25,22 @@ import { Plugin, PluginInitializerContext, RequestHandlerContext, -} from '../../../../core/server'; + SharedGlobalConfig, + StartServicesAccessor, +} from 'src/core/server'; import { ISearchSetup, ISearchStart, ISearchStrategy, SearchEnhancements } from './types'; import { AggsService, AggsSetupDependencies } from './aggs'; import { FieldFormatsStart } from '../field_formats'; -import { registerSearchRoute } from './routes'; +import { registerMsearchRoute, registerSearchRoute } from './routes'; import { ES_SEARCH_STRATEGY, esSearchStrategyProvider } from './es_search'; import { DataPluginStart } from '../plugin'; import { UsageCollectionSetup } from '../../../usage_collection/server'; import { registerUsageCollector } from './collectors/register'; import { usageProvider } from './collectors/usage'; import { searchTelemetry } from '../saved_objects'; -import { IEsSearchRequest, IEsSearchResponse } from '../../common'; +import { IEsSearchRequest, IEsSearchResponse, ISearchOptions } from '../../common'; type StrategyMap< SearchStrategyRequest extends IEsSearchRequest = IEsSearchRequest, @@ -55,6 +58,12 @@ export interface SearchServiceStartDependencies { fieldFormats: FieldFormatsStart; } +/** @internal */ +export interface SearchRouteDependencies { + getStartServices: StartServicesAccessor<{}, DataPluginStart>; + globalConfig$: Observable; +} + export class SearchService implements Plugin { private readonly aggsService = new AggsService(); private defaultSearchStrategyName: string = ES_SEARCH_STRATEGY; @@ -66,11 +75,19 @@ export class SearchService implements Plugin { ) {} public setup( - core: CoreSetup, + core: CoreSetup<{}, DataPluginStart>, { registerFunction, usageCollection }: SearchServiceSetupDependencies ): ISearchSetup { const usage = usageCollection ? 
usageProvider(core) : undefined; + const router = core.http.createRouter(); + const routeDependencies = { + getStartServices: core.getStartServices, + globalConfig$: this.initializerContext.config.legacy.globalConfig$, + }; + registerSearchRoute(router, routeDependencies); + registerMsearchRoute(router, routeDependencies); + this.registerSearchStrategy( ES_SEARCH_STRATEGY, esSearchStrategyProvider( @@ -85,8 +102,6 @@ export class SearchService implements Plugin { registerUsageCollector(usageCollection, this.initializerContext); } - registerSearchRoute(core); - return { __enhance: (enhancements: SearchEnhancements) => { if (this.searchStrategies.hasOwnProperty(enhancements.defaultStrategy)) { @@ -102,11 +117,13 @@ export class SearchService implements Plugin { private search( context: RequestHandlerContext, searchRequest: IEsSearchRequest, - options: Record + options: ISearchOptions ) { - return this.getSearchStrategy( - options.strategy || this.defaultSearchStrategyName - ).search(context, searchRequest, { signal: options.signal }); + return this.getSearchStrategy(options.strategy || this.defaultSearchStrategyName).search( + context, + searchRequest, + options + ); } public start( diff --git a/src/plugins/data/server/search/types.ts b/src/plugins/data/server/search/types.ts index 5ce1bb3e6b9f8..b2b958454de48 100644 --- a/src/plugins/data/server/search/types.ts +++ b/src/plugins/data/server/search/types.ts @@ -18,7 +18,7 @@ */ import { RequestHandlerContext } from '../../../../core/server'; -import { IKibanaSearchResponse, IKibanaSearchRequest } from '../../common/search'; +import { ISearchOptions } from '../../common/search'; import { AggsSetup, AggsStart } from './aggs'; import { SearchUsage } from './collectors/usage'; import { IEsSearchRequest, IEsSearchResponse } from './es_search'; @@ -27,14 +27,6 @@ export interface SearchEnhancements { defaultStrategy: string; } -export interface ISearchOptions { - /** - * An `AbortSignal` that allows the caller of `search` to abort a search request. 
- */ - signal?: AbortSignal; - strategy?: string; -} - export interface ISearchSetup { aggs: AggsSetup; /** @@ -74,9 +66,9 @@ export interface ISearchStart< ) => ISearchStrategy; search: ( context: RequestHandlerContext, - request: IKibanaSearchRequest, + request: IEsSearchRequest, options: ISearchOptions - ) => Promise; + ) => Promise; } /** diff --git a/src/plugins/data/server/server.api.md b/src/plugins/data/server/server.api.md index 9f114f2132009..cd0369a5c4551 100644 --- a/src/plugins/data/server/server.api.md +++ b/src/plugins/data/server/server.api.md @@ -48,7 +48,6 @@ import { ExistsParams } from 'elasticsearch'; import { ExplainParams } from 'elasticsearch'; import { ExpressionAstFunction } from 'src/plugins/expressions/common'; import { ExpressionsServerSetup } from 'src/plugins/expressions/server'; -import { FetchOptions } from 'src/plugins/data/public'; import { FieldStatsParams } from 'elasticsearch'; import { GenericParams } from 'elasticsearch'; import { GetParams } from 'elasticsearch'; @@ -99,6 +98,7 @@ import { IngestDeletePipelineParams } from 'elasticsearch'; import { IngestGetPipelineParams } from 'elasticsearch'; import { IngestPutPipelineParams } from 'elasticsearch'; import { IngestSimulateParams } from 'elasticsearch'; +import { ISearchOptions as ISearchOptions_2 } from 'src/plugins/data/public'; import { ISearchSource } from 'src/plugins/data/public'; import { KibanaClient } from '@elastic/elasticsearch/api/kibana'; import { KibanaConfigType as KibanaConfigType_2 } from 'src/core/server/kibana_config'; @@ -323,6 +323,11 @@ export enum ES_FIELD_TYPES { _TYPE = "_type" } +// Warning: (ae-missing-release-tag) "ES_SEARCH_STRATEGY" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) +// +// @public (undocumented) +export const ES_SEARCH_STRATEGY = "es"; + // Warning: (ae-forgotten-export) The symbol "ExpressionFunctionDefinition" needs to be exported by the entry point index.d.ts // Warning: (ae-forgotten-export) The symbol "Input" needs to be exported by the entry point index.d.ts // Warning: (ae-forgotten-export) The symbol "Arguments" needs to be exported by the entry point index.d.ts @@ -441,14 +446,25 @@ export interface Filter { query?: any; } -// Warning: (ae-forgotten-export) The symbol "SharedGlobalConfig" needs to be exported by the entry point index.d.ts +// Warning: (ae-forgotten-export) The symbol "IUiSettingsClient" needs to be exported by the entry point index.d.ts // Warning: (ae-missing-release-tag) "getDefaultSearchParams" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public (undocumented) -export function getDefaultSearchParams(config: SharedGlobalConfig): { - timeout: string; +export function getDefaultSearchParams(uiSettingsClient: IUiSettingsClient): Promise<{ + maxConcurrentShardRequests: number | undefined; + ignoreThrottled: boolean; ignoreUnavailable: boolean; - restTotalHitsAsInt: boolean; + trackTotalHits: boolean; +}>; + +// Warning: (ae-forgotten-export) The symbol "SharedGlobalConfig" needs to be exported by the entry point index.d.ts +// Warning: (ae-missing-release-tag) "getShardTimeout" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) +// +// @public (undocumented) +export function getShardTimeout(config: SharedGlobalConfig): { + timeout: string; +} | { + timeout?: undefined; }; // Warning: (ae-missing-release-tag) "getTime" is exported by the package, but it is missing a release 
tag (@alpha, @beta, @public, or @internal) @@ -565,7 +581,9 @@ export interface IFieldType { // Warning: (ae-forgotten-export) The symbol "FieldSpec" needs to be exported by the entry point index.d.ts // // (undocumented) - toSpec?: () => FieldSpec; + toSpec?: (options?: { + getFormatterForField?: IndexPattern['getFormatterForField']; + }) => FieldSpec; // (undocumented) type: string; // (undocumented) @@ -676,8 +694,7 @@ export class IndexPatternsFetcher { // // @public (undocumented) export interface ISearchOptions { - signal?: AbortSignal; - // (undocumented) + abortSignal?: AbortSignal; strategy?: string; } @@ -709,7 +726,7 @@ export interface ISearchStart Promise; + search: (context: RequestHandlerContext, request: IEsSearchRequest, options: ISearchOptions) => Promise; } // Warning: (ae-missing-release-tag) "ISearchStrategy" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) @@ -867,7 +884,7 @@ export class Plugin implements Plugin_2>; + search: ISearchStart>; fieldFormats: { fieldFormatServiceFactory: (uiSettings: import("../../../core/server").IUiSettingsClient) => Promise; }; @@ -983,6 +1000,33 @@ export interface SearchUsage { trackSuccess(duration: number): Promise; } +// @internal +export function shimHitsTotal(response: SearchResponse): { + hits: { + total: any; + max_score: number; + hits: { + _index: string; + _type: string; + _id: string; + _score: number; + _source: any; + _version?: number | undefined; + _explanation?: import("elasticsearch").Explanation | undefined; + fields?: any; + highlight?: any; + inner_hits?: any; + matched_queries?: string[] | undefined; + sort?: string[] | undefined; + }[]; + }; + took: number; + timed_out: boolean; + _scroll_id?: string | undefined; + _shards: import("elasticsearch").ShardsResponse; + aggregations?: any; +}; + // Warning: (ae-missing-release-tag) "shouldReadFieldFromDocValues" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public (undocumented) @@ -1021,6 +1065,11 @@ export interface TimeRange { to: string; } +// Warning: (ae-missing-release-tag) "toSnakeCase" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) +// +// @public (undocumented) +export function toSnakeCase(obj: Record): import("lodash").Dictionary; + // Warning: (ae-missing-release-tag) "UI_SETTINGS" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public (undocumented) @@ -1037,6 +1086,7 @@ export const UI_SETTINGS: { readonly COURIER_MAX_CONCURRENT_SHARD_REQUESTS: "courier:maxConcurrentShardRequests"; readonly COURIER_BATCH_SEARCHES: "courier:batchSearches"; readonly SEARCH_INCLUDE_FROZEN: "search:includeFrozen"; + readonly SEARCH_TIMEOUT: "search:timeout"; readonly HISTOGRAM_BAR_TARGET: "histogram:barTarget"; readonly HISTOGRAM_MAX_BARS: "histogram:maxBars"; readonly HISTORY_LIMIT: "history:limit"; @@ -1063,6 +1113,7 @@ export function usageProvider(core: CoreSetup_2): SearchUsage; // Warnings were encountered during analysis: // +// src/plugins/data/common/index_patterns/fields/types.ts:41:25 - (ae-forgotten-export) The symbol "IndexPattern" needs to be exported by the entry point index.d.ts // src/plugins/data/server/index.ts:40:23 - (ae-forgotten-export) The symbol "buildCustomFilter" needs to be exported by the entry point index.d.ts // src/plugins/data/server/index.ts:40:23 - (ae-forgotten-export) The symbol "buildFilter" needs to be exported by the 
entry point index.d.ts // src/plugins/data/server/index.ts:71:21 - (ae-forgotten-export) The symbol "getEsQueryConfig" needs to be exported by the entry point index.d.ts @@ -1084,19 +1135,19 @@ export function usageProvider(core: CoreSetup_2): SearchUsage; // src/plugins/data/server/index.ts:101:26 - (ae-forgotten-export) The symbol "TruncateFormat" needs to be exported by the entry point index.d.ts // src/plugins/data/server/index.ts:127:27 - (ae-forgotten-export) The symbol "isFilterable" needs to be exported by the entry point index.d.ts // src/plugins/data/server/index.ts:127:27 - (ae-forgotten-export) The symbol "isNestedField" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:221:20 - (ae-forgotten-export) The symbol "getRequestInspectorStats" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:221:20 - (ae-forgotten-export) The symbol "getResponseInspectorStats" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:221:20 - (ae-forgotten-export) The symbol "tabifyAggResponse" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:221:20 - (ae-forgotten-export) The symbol "tabifyGetColumns" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:223:1 - (ae-forgotten-export) The symbol "CidrMask" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:224:1 - (ae-forgotten-export) The symbol "dateHistogramInterval" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:233:1 - (ae-forgotten-export) The symbol "InvalidEsCalendarIntervalError" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:234:1 - (ae-forgotten-export) The symbol "InvalidEsIntervalFormatError" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:235:1 - (ae-forgotten-export) The symbol "Ipv4Address" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:239:1 - (ae-forgotten-export) The symbol "isValidEsInterval" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:240:1 - (ae-forgotten-export) The symbol "isValidInterval" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:244:1 - (ae-forgotten-export) The symbol "propFilter" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:247:1 - (ae-forgotten-export) The symbol "toAbsoluteDates" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:225:20 - (ae-forgotten-export) The symbol "getRequestInspectorStats" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:225:20 - (ae-forgotten-export) The symbol "getResponseInspectorStats" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:225:20 - (ae-forgotten-export) The symbol "tabifyAggResponse" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:225:20 - (ae-forgotten-export) The symbol "tabifyGetColumns" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:227:1 - (ae-forgotten-export) The symbol "CidrMask" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:228:1 - (ae-forgotten-export) The symbol "dateHistogramInterval" needs to be exported by the entry point 
index.d.ts +// src/plugins/data/server/index.ts:237:1 - (ae-forgotten-export) The symbol "InvalidEsCalendarIntervalError" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:238:1 - (ae-forgotten-export) The symbol "InvalidEsIntervalFormatError" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:239:1 - (ae-forgotten-export) The symbol "Ipv4Address" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:243:1 - (ae-forgotten-export) The symbol "isValidEsInterval" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:244:1 - (ae-forgotten-export) The symbol "isValidInterval" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:248:1 - (ae-forgotten-export) The symbol "propFilter" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:251:1 - (ae-forgotten-export) The symbol "toAbsoluteDates" needs to be exported by the entry point index.d.ts // src/plugins/data/server/plugin.ts:88:66 - (ae-forgotten-export) The symbol "DataEnhancements" needs to be exported by the entry point index.d.ts // (No @packageDocumentation comment for this package) diff --git a/src/plugins/dev_tools/public/application.tsx b/src/plugins/dev_tools/public/application.tsx index 46f09a8ebb879..d3a54627b0240 100644 --- a/src/plugins/dev_tools/public/application.tsx +++ b/src/plugins/dev_tools/public/application.tsx @@ -90,6 +90,7 @@ function DevToolsWrapper({ devTools, activeDevTool, updateRoute }: DevToolsWrapp element, appBasePath: '', onAppLeave: () => undefined, + setHeaderActionMenu: () => undefined, // TODO: adapt to use Core's ScopedHistory history: {} as any, }; diff --git a/src/plugins/discover/public/application/angular/discover.html b/src/plugins/discover/public/application/angular/discover.html index d3d4f524873d8..94f13e1cd8132 100644 --- a/src/plugins/discover/public/application/angular/discover.html +++ b/src/plugins/discover/public/application/angular/discover.html @@ -31,7 +31,6 @@

        {{screenTitle}}
        [<discover-sidebar> element, markup garbled: the state="state" binding is removed; on-remove-field="removeColumn", selected-index-pattern="searchSource.getField('index')", and set-index-pattern="setIndexPattern" are unchanged]
@@ -78,11 +77,11 @@
        {{screenTitle}}
        [element markup garbled; remaining attributes:]
class="dscTimechart" ng-if="opts.timefield" > - { diff --git a/src/plugins/discover/public/application/components/sidebar/discover_field.test.tsx b/src/plugins/discover/public/application/components/sidebar/discover_field.test.tsx index a0d9e3c541e47..b03b37da40908 100644 --- a/src/plugins/discover/public/application/components/sidebar/discover_field.test.tsx +++ b/src/plugins/discover/public/application/components/sidebar/discover_field.test.tsx @@ -62,7 +62,6 @@ function getComponent(selected = false, showDetails = false, useShortDots = fals ); const field = new IndexPatternField( - indexPattern, { name: 'bytes', type: 'number', @@ -73,18 +72,16 @@ function getComponent(selected = false, showDetails = false, useShortDots = fals aggregatable: true, readFromDocValues: true, }, - 'bytes', - () => {} + 'bytes' ); const props = { indexPattern, field, - getDetails: jest.fn(), + getDetails: jest.fn(() => ({ buckets: [], error: '', exists: 1, total: true, columns: [] })), onAddFilter: jest.fn(), onAddField: jest.fn(), onRemoveField: jest.fn(), - onShowDetails: jest.fn(), showDetails, selected, useShortDots, @@ -104,4 +101,9 @@ describe('discover sidebar field', function () { findTestSubject(comp, 'fieldToggle-bytes').simulate('click'); expect(props.onRemoveField).toHaveBeenCalledWith('bytes'); }); + it('should trigger getDetails', function () { + const { comp, props } = getComponent(true); + findTestSubject(comp, 'field-bytes-showDetails').simulate('click'); + expect(props.getDetails).toHaveBeenCalledWith(props.field); + }); }); diff --git a/src/plugins/discover/public/application/components/sidebar/discover_field.tsx b/src/plugins/discover/public/application/components/sidebar/discover_field.tsx index 639dbfe09277c..bb330cba68e2e 100644 --- a/src/plugins/discover/public/application/components/sidebar/discover_field.tsx +++ b/src/plugins/discover/public/application/components/sidebar/discover_field.tsx @@ -133,6 +133,9 @@ export function DiscoverField({ iconType="plusInCircleFilled" className="dscSidebarItem__action" onClick={(ev: React.MouseEvent) => { + if (ev.type === 'click') { + ev.currentTarget.focus(); + } ev.preventDefault(); ev.stopPropagation(); toggleDisplay(field); @@ -155,6 +158,9 @@ export function DiscoverField({ iconType="cross" className="dscSidebarItem__action" onClick={(ev: React.MouseEvent) => { + if (ev.type === 'click') { + ev.currentTarget.focus(); + } ev.preventDefault(); ev.stopPropagation(); toggleDisplay(field); diff --git a/src/plugins/discover/public/application/components/sidebar/discover_sidebar.tsx b/src/plugins/discover/public/application/components/sidebar/discover_sidebar.tsx index 1f27766a1756d..850624888b24a 100644 --- a/src/plugins/discover/public/application/components/sidebar/discover_sidebar.tsx +++ b/src/plugins/discover/public/application/components/sidebar/discover_sidebar.tsx @@ -30,7 +30,6 @@ import { SavedObject } from '../../../../../../core/types'; import { FIELDS_LIMIT_SETTING } from '../../../../common'; import { groupFields } from './lib/group_fields'; import { IndexPatternField, IndexPattern, UI_SETTINGS } from '../../../../../data/public'; -import { AppState } from '../../angular/discover_state'; import { getDetails } from './lib/get_details'; import { getDefaultFieldFilter, setFieldFilterProp } from './lib/field_filter'; import { getIndexPatternFieldList } from './lib/get_index_pattern_field_list'; @@ -74,10 +73,6 @@ export interface DiscoverSidebarProps { * Callback function to select another index pattern */ setIndexPattern: (id: string) 
=> void; - /** - * Current app state, used for generating a link to visualize - */ - state: AppState; } export function DiscoverSidebar({ @@ -90,7 +85,6 @@ export function DiscoverSidebar({ onRemoveField, selectedIndexPattern, setIndexPattern, - state, }: DiscoverSidebarProps) { const [showFields, setShowFields] = useState(false); const [fields, setFields] = useState(null); @@ -111,8 +105,8 @@ export function DiscoverSidebar({ ); const getDetailsByField = useCallback( - (ipField: IndexPatternField) => getDetails(ipField, hits, columns), - [hits, columns] + (ipField: IndexPatternField) => getDetails(ipField, hits, columns, selectedIndexPattern), + [hits, columns, selectedIndexPattern] ); const popularLimit = services.uiSettings.get(FIELDS_LIMIT_SETTING); @@ -185,10 +179,10 @@ export function DiscoverSidebar({ aria-labelledby="selected_fields" data-test-subj={`fieldList-selected`} > - {selectedFields.map((field: IndexPatternField, idx: number) => { + {selectedFields.map((field: IndexPatternField) => { return (
  • @@ -260,10 +254,10 @@ export function DiscoverSidebar({ aria-labelledby="available_fields available_fields_popular" data-test-subj={`fieldList-popular`} > - {popularFields.map((field: IndexPatternField, idx: number) => { + {popularFields.map((field: IndexPatternField) => { return (
  • @@ -290,9 +284,13 @@ export function DiscoverSidebar({ aria-labelledby="available_fields" data-test-subj={`fieldList-unpopular`} > - {unpopularFields.map((field: IndexPatternField, idx: number) => { + {unpopularFields.map((field: IndexPatternField) => { return ( -
  • +
  • >, - columns: string[] + columns: string[], + indexPattern: IndexPattern ) { const details = { ...fieldCalculator.getFieldValueCounts({ hits, field, + indexPattern, count: 5, grouped: false, }), @@ -37,7 +39,7 @@ export function getDetails( }; if (details.buckets) { for (const bucket of details.buckets) { - bucket.display = field.format.convert(bucket.value); + bucket.display = indexPattern.getFormatterForField(field).convert(bucket.value); } } return details; diff --git a/src/plugins/discover/public/application/components/sidebar/lib/get_index_pattern_field_list.ts b/src/plugins/discover/public/application/components/sidebar/lib/get_index_pattern_field_list.ts index 00e00aa8e2991..c96a8f5ce17b9 100644 --- a/src/plugins/discover/public/application/components/sidebar/lib/get_index_pattern_field_list.ts +++ b/src/plugins/discover/public/application/components/sidebar/lib/get_index_pattern_field_list.ts @@ -31,6 +31,7 @@ export function getIndexPatternFieldList( difference(fieldNamesInDocs, fieldNamesInIndexPattern).forEach((unknownFieldName) => { unknownTypes.push({ + displayName: String(unknownFieldName), name: String(unknownFieldName), type: 'unknown', } as IndexPatternField); diff --git a/src/plugins/discover/public/saved_searches/saved_searches.ts b/src/plugins/discover/public/saved_searches/saved_searches.ts index 09be10b137494..0bc332ed8ec74 100644 --- a/src/plugins/discover/public/saved_searches/saved_searches.ts +++ b/src/plugins/discover/public/saved_searches/saved_searches.ts @@ -22,11 +22,7 @@ import { createSavedSearchClass } from './_saved_search'; export function createSavedSearchesLoader(services: SavedObjectKibanaServices) { const SavedSearchClass = createSavedSearchClass(services); - const savedSearchLoader = new SavedObjectLoader( - SavedSearchClass, - services.savedObjectsClient, - services.chrome - ); + const savedSearchLoader = new SavedObjectLoader(SavedSearchClass, services.savedObjectsClient); // Customize loader properties since adding an 's' on type doesn't work for type 'search' . savedSearchLoader.loaderProperties = { name: 'searches', diff --git a/src/plugins/embeddable/public/lib/panel/panel_header/panel_header.tsx b/src/plugins/embeddable/public/lib/panel/panel_header/panel_header.tsx index 5d7daaa7217ed..f3c4cae720193 100644 --- a/src/plugins/embeddable/public/lib/panel/panel_header/panel_header.tsx +++ b/src/plugins/embeddable/public/lib/panel/panel_header/panel_header.tsx @@ -31,6 +31,7 @@ import { Action } from 'src/plugins/ui_actions/public'; import { PanelOptionsMenu } from './panel_options_menu'; import { IEmbeddable } from '../../embeddables'; import { EmbeddableContext, panelBadgeTrigger, panelNotificationTrigger } from '../../triggers'; +import { uiToReactComponent } from '../../../../../kibana_react/public'; export interface PanelHeaderProps { title?: string; @@ -65,7 +66,9 @@ function renderNotifications( return notifications.map((notification) => { const context = { embeddable }; - let badge = ( + let badge = notification.MenuItem ? 
( + React.createElement(uiToReactComponent(notification.MenuItem)) + ) : ( | undefined + private appList?: ReadonlyMap | undefined ) {} /** diff --git a/src/plugins/embeddable/public/plugin.tsx b/src/plugins/embeddable/public/plugin.tsx index fb09729ab71c3..2ca31994b722d 100644 --- a/src/plugins/embeddable/public/plugin.tsx +++ b/src/plugins/embeddable/public/plugin.tsx @@ -29,7 +29,6 @@ import { Plugin, ScopedHistory, PublicAppInfo, - PublicLegacyAppInfo, } from '../../../core/public'; import { EmbeddableFactoryRegistry, EmbeddableFactoryProvider } from './types'; import { bootstrap } from './bootstrap'; @@ -92,7 +91,7 @@ export class EmbeddablePublicPlugin implements Plugin; + private appList?: ReadonlyMap; private appListSubscription?: Subscription; constructor(initializerContext: PluginInitializerContext) {} diff --git a/src/plugins/es_ui_shared/__packages_do_not_import__/global_flyout/global_flyout.tsx b/src/plugins/es_ui_shared/__packages_do_not_import__/global_flyout/global_flyout.tsx index aa575cd64944c..4dd9cfcaff16b 100644 --- a/src/plugins/es_ui_shared/__packages_do_not_import__/global_flyout/global_flyout.tsx +++ b/src/plugins/es_ui_shared/__packages_do_not_import__/global_flyout/global_flyout.tsx @@ -160,7 +160,7 @@ export const useGlobalFlyout = () => { Array.from(getContents()).forEach(removeContent); } }; - }, [removeContent]); + }, [removeContent, getContents]); return { ...ctx, addContent }; }; diff --git a/src/plugins/es_ui_shared/public/components/json_editor/index.ts b/src/plugins/es_ui_shared/public/components/json_editor/index.ts index 81476a65f4215..63319baa38f5c 100644 --- a/src/plugins/es_ui_shared/public/components/json_editor/index.ts +++ b/src/plugins/es_ui_shared/public/components/json_editor/index.ts @@ -19,4 +19,4 @@ export * from './json_editor'; -export { OnJsonEditorUpdateHandler } from './use_json'; +export { OnJsonEditorUpdateHandler, JsonEditorState } from './use_json'; diff --git a/src/plugins/es_ui_shared/public/components/json_editor/json_editor.tsx b/src/plugins/es_ui_shared/public/components/json_editor/json_editor.tsx index 8c63cc8494a8b..206db5a285620 100644 --- a/src/plugins/es_ui_shared/public/components/json_editor/json_editor.tsx +++ b/src/plugins/es_ui_shared/public/components/json_editor/json_editor.tsx @@ -17,95 +17,97 @@ * under the License. 
*/ -import React, { useCallback } from 'react'; +import React, { useCallback, useMemo } from 'react'; import { EuiFormRow, EuiCodeEditor } from '@elastic/eui'; import { debounce } from 'lodash'; -import { isJSON } from '../../../static/validators/string'; import { useJson, OnJsonEditorUpdateHandler } from './use_json'; -interface Props { - onUpdate: OnJsonEditorUpdateHandler; +interface Props { + onUpdate: OnJsonEditorUpdateHandler; label?: string; helpText?: React.ReactNode; value?: string; - defaultValue?: { [key: string]: any }; + defaultValue?: T; euiCodeEditorProps?: { [key: string]: any }; error?: string | null; } -export const JsonEditor = React.memo( - ({ - label, - helpText, +function JsonEditorComp({ + label, + helpText, + onUpdate, + value, + defaultValue, + euiCodeEditorProps, + error: propsError, +}: Props) { + const { content, setContent, error: internalError, isControlled } = useJson({ + defaultValue, onUpdate, value, - defaultValue, - euiCodeEditorProps, - error: propsError, - }: Props) => { - const isControlled = value !== undefined; + }); - const { content, setContent, error: internalError } = useJson({ - defaultValue, - onUpdate, - isControlled, - }); + const debouncedSetContent = useMemo(() => { + return debounce(setContent, 300); + }, [setContent]); - const debouncedSetContent = useCallback(debounce(setContent, 300), [setContent]); + // We let the consumer control the validation and the error message. + const error = isControlled ? propsError : internalError; - // We let the consumer control the validation and the error message. - const error = isControlled ? propsError : internalError; + const onEuiCodeEditorChange = useCallback( + (updated: string) => { + if (isControlled) { + onUpdate({ + data: { + raw: updated, + format: () => JSON.parse(updated), + }, + validate: () => { + try { + JSON.parse(updated); + return true; + } catch (e) { + return false; + } + }, + isValid: undefined, + }); + } else { + debouncedSetContent(updated); + } + }, + [isControlled, debouncedSetContent, onUpdate] + ); - const onEuiCodeEditorChange = useCallback( - (updated: string) => { - if (isControlled) { - onUpdate({ - data: { - raw: updated, - format() { - return JSON.parse(updated); - }, - }, - validate() { - return isJSON(updated); - }, - isValid: undefined, - }); - } else { - debouncedSetContent(updated); - } - }, - [isControlled] - ); + return ( + + + + ); +} - return ( - - - - ); - } -); +export const JsonEditor = React.memo(JsonEditorComp) as typeof JsonEditorComp; diff --git a/src/plugins/es_ui_shared/public/components/json_editor/use_json.ts b/src/plugins/es_ui_shared/public/components/json_editor/use_json.ts index 1b5ca5d7f4384..47d518e6814a4 100644 --- a/src/plugins/es_ui_shared/public/components/json_editor/use_json.ts +++ b/src/plugins/es_ui_shared/public/components/json_editor/use_json.ts @@ -17,24 +17,28 @@ * under the License. 
*/ -import { useEffect, useState, useRef } from 'react'; +import { useEffect, useState, useRef, useCallback } from 'react'; import { i18n } from '@kbn/i18n'; import { isJSON } from '../../../static/validators/string'; -export type OnJsonEditorUpdateHandler = (arg: { +export interface JsonEditorState { data: { raw: string; format(): T; }; validate(): boolean; isValid: boolean | undefined; -}) => void; +} + +export type OnJsonEditorUpdateHandler = ( + arg: JsonEditorState +) => void; interface Parameters { onUpdate: OnJsonEditorUpdateHandler; defaultValue?: T; - isControlled?: boolean; + value?: string; } const stringifyJson = (json: { [key: string]: any }) => @@ -43,13 +47,16 @@ const stringifyJson = (json: { [key: string]: any }) => export const useJson = ({ defaultValue = {} as T, onUpdate, - isControlled = false, + value, }: Parameters) => { - const didMount = useRef(false); - const [content, setContent] = useState(stringifyJson(defaultValue)); + const isControlled = value !== undefined; + const isMounted = useRef(false); + const [content, setContent] = useState( + isControlled ? value! : stringifyJson(defaultValue) + ); const [error, setError] = useState(null); - const validate = () => { + const validate = useCallback(() => { // We allow empty string as it will be converted to "{}"" const isValid = content.trim() === '' ? true : isJSON(content); if (!isValid) { @@ -62,33 +69,43 @@ export const useJson = ({ setError(null); } return isValid; - }; + }, [content]); - const formatContent = () => { + const formatContent = useCallback(() => { const isValid = validate(); const data = isValid && content.trim() !== '' ? JSON.parse(content) : {}; return data as T; - }; + }, [validate, content]); useEffect(() => { - if (didMount.current) { - const isValid = isControlled ? 
undefined : validate(); - onUpdate({ - data: { - raw: content, - format: formatContent, - }, - validate, - isValid, - }); - } else { - didMount.current = true; + if (!isMounted.current || isControlled) { + return; } - }, [content]); + + const isValid = validate(); + + onUpdate({ + data: { + raw: content, + format: formatContent, + }, + validate, + isValid, + }); + }, [onUpdate, content, formatContent, validate, isControlled]); + + useEffect(() => { + isMounted.current = true; + + return () => { + isMounted.current = false; + }; + }, []); return { content, setContent, error, + isControlled, }; }; diff --git a/src/plugins/es_ui_shared/public/index.ts b/src/plugins/es_ui_shared/public/index.ts index bdea5ccf5fe26..5a1c13658604a 100644 --- a/src/plugins/es_ui_shared/public/index.ts +++ b/src/plugins/es_ui_shared/public/index.ts @@ -26,7 +26,7 @@ import * as Monaco from './monaco'; import * as ace from './ace'; import * as GlobalFlyout from './global_flyout'; -export { JsonEditor, OnJsonEditorUpdateHandler } from './components/json_editor'; +export { JsonEditor, OnJsonEditorUpdateHandler, JsonEditorState } from './components/json_editor'; export { SectionLoading } from './components/section_loading'; @@ -39,7 +39,7 @@ export { UseRequestResponse, sendRequest, useRequest, -} from './request/np_ready_request'; +} from './request'; export { indices } from './indices'; diff --git a/src/plugins/es_ui_shared/public/indices/validate/validate_index.test.ts b/src/plugins/es_ui_shared/public/indices/validate/validate_index.test.ts index a6792543cd726..b28cac1cb76f5 100644 --- a/src/plugins/es_ui_shared/public/indices/validate/validate_index.test.ts +++ b/src/plugins/es_ui_shared/public/indices/validate/validate_index.test.ts @@ -17,8 +17,6 @@ * under the License. */ -jest.mock('ui/new_platform'); - import { INDEX_ILLEGAL_CHARACTERS_VISIBLE } from '../constants'; import { diff --git a/src/plugins/es_ui_shared/public/request/index.ts b/src/plugins/es_ui_shared/public/request/index.ts index f942a9cc3932b..9e38bfe5ee16b 100644 --- a/src/plugins/es_ui_shared/public/request/index.ts +++ b/src/plugins/es_ui_shared/public/request/index.ts @@ -17,11 +17,5 @@ * under the License. */ -export { - SendRequestConfig, - SendRequestResponse, - UseRequestConfig, - UseRequestResponse, - sendRequest, - useRequest, -} from './request'; +export { SendRequestConfig, SendRequestResponse, sendRequest } from './send_request'; +export { UseRequestConfig, UseRequestResponse, useRequest } from './use_request'; diff --git a/src/plugins/es_ui_shared/public/request/np_ready_request.ts b/src/plugins/es_ui_shared/public/request/np_ready_request.ts deleted file mode 100644 index 06af698f2ce02..0000000000000 --- a/src/plugins/es_ui_shared/public/request/np_ready_request.ts +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { useEffect, useState, useRef, useMemo } from 'react'; - -import { HttpSetup, HttpFetchQuery } from '../../../../../src/core/public'; - -export interface SendRequestConfig { - path: string; - method: 'get' | 'post' | 'put' | 'delete' | 'patch' | 'head'; - query?: HttpFetchQuery; - body?: any; -} - -export interface SendRequestResponse { - data: D | null; - error: E | null; -} - -export interface UseRequestConfig extends SendRequestConfig { - pollIntervalMs?: number; - initialData?: any; - deserializer?: (data: any) => any; -} - -export interface UseRequestResponse { - isInitialRequest: boolean; - isLoading: boolean; - error: E | null; - data?: D | null; - sendRequest: (...args: any[]) => Promise>; -} - -export const sendRequest = async ( - httpClient: HttpSetup, - { path, method, body, query }: SendRequestConfig -): Promise> => { - try { - const stringifiedBody = typeof body === 'string' ? body : JSON.stringify(body); - const response = await httpClient[method](path, { body: stringifiedBody, query }); - - return { - data: response.data ? response.data : response, - error: null, - }; - } catch (e) { - return { - data: null, - error: e.response && e.response.data ? e.response.data : e.body, - }; - } -}; - -export const useRequest = ( - httpClient: HttpSetup, - { - path, - method, - query, - body, - pollIntervalMs, - initialData, - deserializer = (data: any): any => data, - }: UseRequestConfig -): UseRequestResponse => { - const sendRequestRef = useRef<() => Promise>>(); - // Main states for tracking request status and data - const [error, setError] = useState(null); - const [isLoading, setIsLoading] = useState(true); - const [data, setData] = useState(initialData); - - // Consumers can use isInitialRequest to implement a polling UX. - const [isInitialRequest, setIsInitialRequest] = useState(true); - const pollInterval = useRef(null); - const pollIntervalId = useRef(null); - - // We always want to use the most recently-set interval in scheduleRequest. - pollInterval.current = pollIntervalMs; - - // Tied to every render and bound to each request. - let isOutdatedRequest = false; - - const scheduleRequest = () => { - // Clear current interval - if (pollIntervalId.current) { - clearTimeout(pollIntervalId.current); - } - - // Set new interval - if (pollInterval.current) { - pollIntervalId.current = setTimeout( - () => (sendRequestRef.current ?? _sendRequest)(), - pollInterval.current - ); - } - }; - - const _sendRequest = async () => { - // We don't clear error or data, so it's up to the consumer to decide whether to display the - // "old" error/data or loading state when a new request is in-flight. - setIsLoading(true); - - const requestBody = { - path, - method, - query, - body, - }; - - const response = await sendRequest(httpClient, requestBody); - const { data: serializedResponseData, error: responseError } = response; - - // If an outdated request has resolved, DON'T update state, but DO allow the processData handler - // to execute side effects like update telemetry. - if (isOutdatedRequest) { - return { data: null, error: null }; - } - - setError(responseError); - - if (!responseError) { - const responseData = deserializer(serializedResponseData); - setData(responseData); - } - - setIsLoading(false); - setIsInitialRequest(false); - - // If we're on an interval, we need to schedule the next request. 
This also allows us to reset - // the interval if the user has manually requested the data, to avoid doubled-up requests. - scheduleRequest(); - - return { data: serializedResponseData, error: responseError }; - }; - - useEffect(() => { - sendRequestRef.current = _sendRequest; - }, [_sendRequest]); - - const stringifiedQuery = useMemo(() => JSON.stringify(query), [query]); - - useEffect(() => { - (sendRequestRef.current ?? _sendRequest)(); - // To be functionally correct we'd send a new request if the method, path, query or body changes. - // But it doesn't seem likely that the method will change and body is likely to be a new - // object even if its shape hasn't changed, so for now we're just watching the path and the query. - }, [path, stringifiedQuery]); - - useEffect(() => { - scheduleRequest(); - - // Clean up intervals and inflight requests and corresponding state changes - return () => { - isOutdatedRequest = true; - if (pollIntervalId.current) { - clearTimeout(pollIntervalId.current); - } - }; - }, [pollIntervalMs]); - - return { - isInitialRequest, - isLoading, - error, - data, - sendRequest: sendRequestRef.current ?? _sendRequest, // Gives the user the ability to manually request data - }; -}; diff --git a/src/plugins/es_ui_shared/public/request/request.test.js b/src/plugins/es_ui_shared/public/request/request.test.js deleted file mode 100644 index 190c32517eefe..0000000000000 --- a/src/plugins/es_ui_shared/public/request/request.test.js +++ /dev/null @@ -1,262 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import sinon from 'sinon'; -// import { sendRequest as sendRequestUnbound, useRequest as useRequestUnbound } from './request'; - -import React from 'react'; -import { act } from 'react-dom/test-utils'; -import { mount } from 'enzyme'; - -const TestHook = ({ callback }) => { - callback(); - return null; -}; - -let element; - -const testHook = (callback) => { - element = mount(); -}; - -const wait = async (wait) => new Promise((resolve) => setTimeout(resolve, wait || 1)); - -// FLAKY: -// - https://github.com/elastic/kibana/issues/42561 -// - https://github.com/elastic/kibana/issues/42562 -// - https://github.com/elastic/kibana/issues/42563 -// - https://github.com/elastic/kibana/issues/42225 -describe.skip('request lib', () => { - const successRequest = { path: '/success', method: 'post', body: {} }; - const errorRequest = { path: '/error', method: 'post', body: {} }; - const successResponse = { statusCode: 200, data: { message: 'Success message' } }; - const errorResponse = { statusCode: 400, statusText: 'Error message' }; - - let sendPost; - let sendRequest; - let useRequest; - - /** - * - * commented out due to hooks being called regardless of skip - * https://github.com/facebook/jest/issues/8379 - - beforeEach(() => { - sendPost = sinon.stub(); - sendPost.withArgs(successRequest.path, successRequest.body).returns(successResponse); - sendPost.withArgs(errorRequest.path, errorRequest.body).throws(errorResponse); - - const httpClient = { - post: (...args) => { - return sendPost(...args); - }, - }; - - sendRequest = sendRequestUnbound.bind(null, httpClient); - useRequest = useRequestUnbound.bind(null, httpClient); - }); - - */ - - describe('sendRequest function', () => { - it('uses the provided path, method, and body to send the request', async () => { - const response = await sendRequest({ ...successRequest }); - sinon.assert.calledOnce(sendPost); - expect(response).toEqual({ data: successResponse.data, error: null }); - }); - - it('surfaces errors', async () => { - try { - await sendRequest({ ...errorRequest }); - } catch (e) { - sinon.assert.calledOnce(sendPost); - expect(e).toBe(errorResponse.error); - } - }); - }); - - describe('useRequest hook', () => { - let hook; - - function initUseRequest(config) { - act(() => { - testHook(() => { - hook = useRequest(config); - }); - }); - } - - describe('parameters', () => { - describe('path, method, body', () => { - it('is used to send the request', async () => { - initUseRequest({ ...successRequest }); - await wait(50); - expect(hook.data).toBe(successResponse.data); - }); - }); - - describe('pollIntervalMs', () => { - it('sends another request after the specified time has elapsed', async () => { - initUseRequest({ ...successRequest, pollIntervalMs: 10 }); - await wait(50); - // We just care that multiple requests have been sent out. We don't check the specific - // timing because that risks introducing flakiness into the tests, and it's unlikely - // we could break the implementation by getting the exact timing wrong. - expect(sendPost.callCount).toBeGreaterThan(1); - - // We have to manually clean up or else the interval will continue to fire requests, - // interfering with other tests. 
- element.unmount(); - }); - }); - - describe('initialData', () => { - it('sets the initial data value', () => { - initUseRequest({ ...successRequest, initialData: 'initialData' }); - expect(hook.data).toBe('initialData'); - }); - }); - - describe('deserializer', () => { - it('is called once the request resolves', async () => { - const deserializer = sinon.stub(); - initUseRequest({ ...successRequest, deserializer }); - sinon.assert.notCalled(deserializer); - - await wait(50); - sinon.assert.calledOnce(deserializer); - sinon.assert.calledWith(deserializer, successResponse.data); - }); - - it('processes data', async () => { - initUseRequest({ ...successRequest, deserializer: () => 'intercepted' }); - await wait(50); - expect(hook.data).toBe('intercepted'); - }); - }); - }); - - describe('state', () => { - describe('isInitialRequest', () => { - it('is true for the first request and false for subsequent requests', async () => { - initUseRequest({ ...successRequest }); - expect(hook.isInitialRequest).toBe(true); - - hook.sendRequest(); - await wait(50); - expect(hook.isInitialRequest).toBe(false); - }); - }); - - describe('isLoading', () => { - it('represents in-flight request status', async () => { - initUseRequest({ ...successRequest }); - expect(hook.isLoading).toBe(true); - - await wait(50); - expect(hook.isLoading).toBe(false); - }); - }); - - describe('error', () => { - it('surfaces errors from requests', async () => { - initUseRequest({ ...errorRequest }); - await wait(50); - expect(hook.error).toBe(errorResponse); - }); - - it('persists while a request is in-flight', async () => { - initUseRequest({ ...errorRequest }); - await wait(50); - hook.sendRequest(); - expect(hook.isLoading).toBe(true); - expect(hook.error).toBe(errorResponse); - }); - - it('is null when the request is successful', async () => { - initUseRequest({ ...successRequest }); - await wait(50); - expect(hook.isLoading).toBe(false); - expect(hook.error).toBeNull(); - }); - }); - - describe('data', () => { - it('surfaces payloads from requests', async () => { - initUseRequest({ ...successRequest }); - await wait(50); - expect(hook.data).toBe(successResponse.data); - }); - - it('persists while a request is in-flight', async () => { - initUseRequest({ ...successRequest }); - await wait(50); - hook.sendRequest(); - expect(hook.isLoading).toBe(true); - expect(hook.data).toBe(successResponse.data); - }); - - it('is null when the request fails', async () => { - initUseRequest({ ...errorRequest }); - await wait(50); - expect(hook.isLoading).toBe(false); - expect(hook.data).toBeNull(); - }); - }); - }); - - describe('callbacks', () => { - describe('sendRequest', () => { - it('sends the request', () => { - initUseRequest({ ...successRequest }); - sinon.assert.calledOnce(sendPost); - hook.sendRequest(); - sinon.assert.calledTwice(sendPost); - }); - - it('resets the pollIntervalMs', async () => { - initUseRequest({ ...successRequest, pollIntervalMs: 800 }); - await wait(200); // 200ms - hook.sendRequest(); - expect(sendPost.callCount).toBe(2); - - await wait(200); // 400ms - hook.sendRequest(); - - await wait(200); // 600ms - hook.sendRequest(); - - await wait(200); // 800ms - hook.sendRequest(); - - await wait(200); // 1000ms - hook.sendRequest(); - - // If sendRequest didn't reset the interval, the interval would have triggered another - // request by now, and the callCount would be 7. 
- expect(sendPost.callCount).toBe(6); - - // We have to manually clean up or else the interval will continue to fire requests, - // interfering with other tests. - element.unmount(); - }); - }); - }); - }); -}); diff --git a/src/plugins/es_ui_shared/public/request/request.ts b/src/plugins/es_ui_shared/public/request/request.ts deleted file mode 100644 index fd6980367136e..0000000000000 --- a/src/plugins/es_ui_shared/public/request/request.ts +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { useEffect, useState, useRef } from 'react'; - -export interface SendRequestConfig { - path: string; - method: 'get' | 'post' | 'put' | 'delete' | 'patch' | 'head'; - body?: any; -} - -export interface SendRequestResponse { - data: any; - error: Error | null; -} - -export interface UseRequestConfig extends SendRequestConfig { - pollIntervalMs?: number; - initialData?: any; - deserializer?: (data: any) => any; -} - -export interface UseRequestResponse { - isInitialRequest: boolean; - isLoading: boolean; - error: null | unknown; - data: any; - sendRequest: (...args: any[]) => Promise; -} - -export const sendRequest = async ( - httpClient: ng.IHttpService, - { path, method, body }: SendRequestConfig -): Promise => { - try { - const response = await (httpClient as any)[method](path, body); - - if (typeof response.data === 'undefined') { - throw new Error(response.statusText); - } - - return { data: response.data, error: null }; - } catch (e) { - return { - data: null, - error: e.response ? e.response : e, - }; - } -}; - -export const useRequest = ( - httpClient: ng.IHttpService, - { - path, - method, - body, - pollIntervalMs, - initialData, - deserializer = (data: any): any => data, - }: UseRequestConfig -): UseRequestResponse => { - // Main states for tracking request status and data - const [error, setError] = useState(null); - const [isLoading, setIsLoading] = useState(true); - const [data, setData] = useState(initialData); - - // Consumers can use isInitialRequest to implement a polling UX. - const [isInitialRequest, setIsInitialRequest] = useState(true); - const pollInterval = useRef(null); - const pollIntervalId = useRef(null); - - // We always want to use the most recently-set interval in scheduleRequest. - pollInterval.current = pollIntervalMs; - - // Tied to every render and bound to each request. 
- let isOutdatedRequest = false; - - const scheduleRequest = () => { - // Clear current interval - if (pollIntervalId.current) { - clearTimeout(pollIntervalId.current); - } - - // Set new interval - if (pollInterval.current) { - pollIntervalId.current = setTimeout(_sendRequest, pollInterval.current); - } - }; - - const _sendRequest = async () => { - // We don't clear error or data, so it's up to the consumer to decide whether to display the - // "old" error/data or loading state when a new request is in-flight. - setIsLoading(true); - - const requestBody = { - path, - method, - body, - }; - - const response = await sendRequest(httpClient, requestBody); - const { data: serializedResponseData, error: responseError } = response; - const responseData = deserializer(serializedResponseData); - - // If an outdated request has resolved, DON'T update state, but DO allow the processData handler - // to execute side effects like update telemetry. - if (isOutdatedRequest) { - return { data: null, error: null }; - } - - setError(responseError); - setData(responseData); - setIsLoading(false); - setIsInitialRequest(false); - - // If we're on an interval, we need to schedule the next request. This also allows us to reset - // the interval if the user has manually requested the data, to avoid doubled-up requests. - scheduleRequest(); - - return { data: serializedResponseData, error: responseError }; - }; - - useEffect(() => { - _sendRequest(); - // To be functionally correct we'd send a new request if the method, path, or body changes. - // But it doesn't seem likely that the method will change and body is likely to be a new - // object even if its shape hasn't changed, so for now we're just watching the path. - }, [path]); - - useEffect(() => { - scheduleRequest(); - - // Clean up intervals and inflight requests and corresponding state changes - return () => { - isOutdatedRequest = true; - if (pollIntervalId.current) { - clearTimeout(pollIntervalId.current); - } - }; - }, [pollIntervalMs]); - - return { - isInitialRequest, - isLoading, - error, - data, - sendRequest: _sendRequest, // Gives the user the ability to manually request data - }; -}; diff --git a/src/plugins/es_ui_shared/public/request/send_request.test.helpers.ts b/src/plugins/es_ui_shared/public/request/send_request.test.helpers.ts new file mode 100644 index 0000000000000..4312780e74c0f --- /dev/null +++ b/src/plugins/es_ui_shared/public/request/send_request.test.helpers.ts @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import sinon from 'sinon'; + +import { HttpSetup, HttpFetchOptions } from '../../../../../src/core/public'; +import { + SendRequestConfig, + SendRequestResponse, + sendRequest as originalSendRequest, +} from './send_request'; + +export interface SendRequestHelpers { + getSendRequestSpy: () => sinon.SinonStub; + sendSuccessRequest: () => Promise; + getSuccessResponse: () => SendRequestResponse; + sendErrorRequest: () => Promise; + getErrorResponse: () => SendRequestResponse; +} + +const successRequest: SendRequestConfig = { method: 'post', path: '/success', body: {} }; +const successResponse = { statusCode: 200, data: { message: 'Success message' } }; + +const errorValue = { statusCode: 400, statusText: 'Error message' }; +const errorRequest: SendRequestConfig = { method: 'post', path: '/error', body: {} }; +const errorResponse = { response: { data: errorValue } }; + +export const createSendRequestHelpers = (): SendRequestHelpers => { + const sendRequestSpy = sinon.stub(); + const httpClient = { + post: (path: string, options: HttpFetchOptions) => sendRequestSpy(path, options), + }; + const sendRequest = originalSendRequest.bind(null, httpClient as HttpSetup) as ( + config: SendRequestConfig + ) => Promise>; + + // Set up successful request helpers. + sendRequestSpy + .withArgs(successRequest.path, { + body: JSON.stringify(successRequest.body), + query: undefined, + }) + .resolves(successResponse); + const sendSuccessRequest = () => sendRequest({ ...successRequest }); + const getSuccessResponse = () => ({ data: successResponse.data, error: null }); + + // Set up failed request helpers. + sendRequestSpy + .withArgs(errorRequest.path, { + body: JSON.stringify(errorRequest.body), + query: undefined, + }) + .rejects(errorResponse); + const sendErrorRequest = () => sendRequest({ ...errorRequest }); + const getErrorResponse = () => ({ + data: null, + error: errorResponse.response.data, + }); + + return { + getSendRequestSpy: () => sendRequestSpy, + sendSuccessRequest, + getSuccessResponse, + sendErrorRequest, + getErrorResponse, + }; +}; diff --git a/src/plugins/es_ui_shared/public/request/send_request.test.ts b/src/plugins/es_ui_shared/public/request/send_request.test.ts new file mode 100644 index 0000000000000..e4deaeaba817e --- /dev/null +++ b/src/plugins/es_ui_shared/public/request/send_request.test.ts @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import sinon from 'sinon'; + +import { SendRequestHelpers, createSendRequestHelpers } from './send_request.test.helpers'; + +describe('sendRequest function', () => { + let helpers: SendRequestHelpers; + + beforeEach(() => { + helpers = createSendRequestHelpers(); + }); + + it('uses the provided path, method, and body to send the request', async () => { + const { sendSuccessRequest, getSendRequestSpy, getSuccessResponse } = helpers; + + const response = await sendSuccessRequest(); + sinon.assert.calledOnce(getSendRequestSpy()); + expect(response).toEqual(getSuccessResponse()); + }); + + it('surfaces errors', async () => { + const { sendErrorRequest, getSendRequestSpy, getErrorResponse } = helpers; + + // For some reason sinon isn't throwing an error on rejection, as an awaited Promise normally would. + const error = await sendErrorRequest(); + sinon.assert.calledOnce(getSendRequestSpy()); + expect(error).toEqual(getErrorResponse()); + }); +}); diff --git a/src/plugins/es_ui_shared/public/request/send_request.ts b/src/plugins/es_ui_shared/public/request/send_request.ts new file mode 100644 index 0000000000000..453e91570cd85 --- /dev/null +++ b/src/plugins/es_ui_shared/public/request/send_request.ts @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { HttpSetup, HttpFetchQuery } from '../../../../../src/core/public'; + +export interface SendRequestConfig { + path: string; + method: 'get' | 'post' | 'put' | 'delete' | 'patch' | 'head'; + query?: HttpFetchQuery; + body?: any; +} + +export interface SendRequestResponse { + data: D | null; + error: E | null; +} + +export const sendRequest = async ( + httpClient: HttpSetup, + { path, method, body, query }: SendRequestConfig +): Promise> => { + try { + const stringifiedBody = typeof body === 'string' ? body : JSON.stringify(body); + const response = await httpClient[method](path, { body: stringifiedBody, query }); + + return { + data: response.data ? response.data : response, + error: null, + }; + } catch (e) { + return { + data: null, + error: e.response?.data ?? e.body, + }; + } +}; diff --git a/src/plugins/es_ui_shared/public/request/use_request.test.helpers.tsx b/src/plugins/es_ui_shared/public/request/use_request.test.helpers.tsx new file mode 100644 index 0000000000000..0d6fd122ad22c --- /dev/null +++ b/src/plugins/es_ui_shared/public/request/use_request.test.helpers.tsx @@ -0,0 +1,184 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
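For orientation, a minimal consumer-side sketch of how the stateless sendRequest helper defined in send_request.ts above could be called. This snippet is not part of the change itself; the import paths, endpoint, and response shape are assumptions, and only the SendRequestConfig / SendRequestResponse contract comes from the new file.

import { HttpSetup } from 'src/core/public'; // path illustrative; real consumers use a relative path
import { sendRequest } from './send_request'; // path illustrative

interface ItemsResponse {
  items: string[];
}

// Hypothetical data loader: resolves with either data or an error, and never throws,
// because sendRequest catches failures and surfaces them on the `error` property.
export const loadItems = async (httpClient: HttpSetup) => {
  const { data, error } = await sendRequest<ItemsResponse>(httpClient, {
    method: 'get',
    path: '/api/my_plugin/items', // hypothetical endpoint
    query: { page: 1 },
  });

  if (error) {
    return { items: [], error };
  }

  return { items: data?.items ?? [], error: null };
};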
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { mount, ReactWrapper } from 'enzyme'; +import sinon from 'sinon'; + +import { HttpSetup, HttpFetchOptions } from '../../../../../src/core/public'; +import { SendRequestConfig, SendRequestResponse } from './send_request'; +import { useRequest, UseRequestResponse, UseRequestConfig } from './use_request'; + +export interface UseRequestHelpers { + advanceTime: (ms: number) => Promise; + completeRequest: () => Promise; + hookResult: UseRequestResponse; + getSendRequestSpy: () => sinon.SinonStub; + setupSuccessRequest: (overrides?: {}, requestTimings?: number[]) => void; + getSuccessResponse: () => SendRequestResponse; + setupErrorRequest: (overrides?: {}, requestTimings?: number[]) => void; + getErrorResponse: () => SendRequestResponse; + setErrorResponse: (overrides?: {}) => void; + setupErrorWithBodyRequest: (overrides?: {}) => void; + getErrorWithBodyResponse: () => SendRequestResponse; +} + +// Each request will take 1s to resolve. +export const REQUEST_TIME = 1000; + +const successRequest: SendRequestConfig = { method: 'post', path: '/success', body: {} }; +const successResponse = { statusCode: 200, data: { message: 'Success message' } }; + +const errorValue = { statusCode: 400, statusText: 'Error message' }; +const errorRequest: SendRequestConfig = { method: 'post', path: '/error', body: {} }; +const errorResponse = { response: { data: errorValue } }; + +const errorWithBodyRequest: SendRequestConfig = { + method: 'post', + path: '/errorWithBody', + body: {}, +}; +const errorWithBodyResponse = { body: errorValue }; + +export const createUseRequestHelpers = (): UseRequestHelpers => { + // The behavior we're testing involves state changes over time, so we need finer control over + // timing. + jest.useFakeTimers(); + + const flushPromiseJobQueue = async () => { + // See https://stackoverflow.com/questions/52177631/jest-timer-and-promise-dont-work-well-settimeout-and-async-function + await Promise.resolve(); + }; + + const completeRequest = async () => { + await act(async () => { + jest.runAllTimers(); + await flushPromiseJobQueue(); + }); + }; + + const advanceTime = async (ms: number) => { + await act(async () => { + jest.advanceTimersByTime(ms); + await flushPromiseJobQueue(); + }); + }; + + let element: ReactWrapper; + // We'll use this object to observe the state of the hook and access its callback(s). + const hookResult = {} as UseRequestResponse; + const sendRequestSpy = sinon.stub(); + + const setupUseRequest = (config: UseRequestConfig, requestTimings?: number[]) => { + let requestCount = 0; + + const httpClient = { + post: (path: string, options: HttpFetchOptions) => { + return new Promise((resolve, reject) => { + // Increase the time it takes to resolve a request so we have time to inspect the hook + // as it goes through various states. 
+ setTimeout(() => { + try { + resolve(sendRequestSpy(path, options)); + } catch (e) { + reject(e); + } + }, (requestTimings && requestTimings[requestCount++]) || REQUEST_TIME); + }); + }, + }; + + const TestComponent = ({ requestConfig }: { requestConfig: UseRequestConfig }) => { + const { isInitialRequest, isLoading, error, data, sendRequest } = useRequest( + httpClient as HttpSetup, + requestConfig + ); + + hookResult.isInitialRequest = isInitialRequest; + hookResult.isLoading = isLoading; + hookResult.error = error; + hookResult.data = data; + hookResult.sendRequest = sendRequest; + + return null; + }; + + act(() => { + element = mount(); + }); + }; + + // Set up successful request helpers. + sendRequestSpy + .withArgs(successRequest.path, { + body: JSON.stringify(successRequest.body), + query: undefined, + }) + .resolves(successResponse); + const setupSuccessRequest = (overrides = {}, requestTimings?: number[]) => + setupUseRequest({ ...successRequest, ...overrides }, requestTimings); + const getSuccessResponse = () => ({ data: successResponse.data, error: null }); + + // Set up failed request helpers. + sendRequestSpy + .withArgs(errorRequest.path, { + body: JSON.stringify(errorRequest.body), + query: undefined, + }) + .rejects(errorResponse); + const setupErrorRequest = (overrides = {}, requestTimings?: number[]) => + setupUseRequest({ ...errorRequest, ...overrides }, requestTimings); + const getErrorResponse = () => ({ + data: null, + error: errorResponse.response.data, + }); + // We'll use this to change a success response to an error response, to test how the state changes. + const setErrorResponse = (overrides = {}) => { + element.setProps({ requestConfig: { ...errorRequest, ...overrides } }); + }; + + // Set up failed request helpers with the alternative error shape. + sendRequestSpy + .withArgs(errorWithBodyRequest.path, { + body: JSON.stringify(errorWithBodyRequest.body), + query: undefined, + }) + .rejects(errorWithBodyResponse); + const setupErrorWithBodyRequest = (overrides = {}) => + setupUseRequest({ ...errorWithBodyRequest, ...overrides }); + const getErrorWithBodyResponse = () => ({ + data: null, + error: errorWithBodyResponse.body, + }); + + return { + advanceTime, + completeRequest, + hookResult, + getSendRequestSpy: () => sendRequestSpy, + setupSuccessRequest, + getSuccessResponse, + setupErrorRequest, + getErrorResponse, + setErrorResponse, + setupErrorWithBodyRequest, + getErrorWithBodyResponse, + }; +}; diff --git a/src/plugins/es_ui_shared/public/request/use_request.test.ts b/src/plugins/es_ui_shared/public/request/use_request.test.ts new file mode 100644 index 0000000000000..f7902218d9314 --- /dev/null +++ b/src/plugins/es_ui_shared/public/request/use_request.test.ts @@ -0,0 +1,353 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { act } from 'react-dom/test-utils'; +import sinon from 'sinon'; + +import { + UseRequestHelpers, + REQUEST_TIME, + createUseRequestHelpers, +} from './use_request.test.helpers'; + +describe('useRequest hook', () => { + let helpers: UseRequestHelpers; + + beforeEach(() => { + helpers = createUseRequestHelpers(); + }); + + describe('parameters', () => { + describe('path, method, body', () => { + it('is used to send the request', async () => { + const { setupSuccessRequest, completeRequest, hookResult, getSuccessResponse } = helpers; + setupSuccessRequest(); + await completeRequest(); + expect(hookResult.data).toBe(getSuccessResponse().data); + }); + }); + + describe('pollIntervalMs', () => { + it('sends another request after the specified time has elapsed', async () => { + const { setupSuccessRequest, advanceTime, getSendRequestSpy } = helpers; + setupSuccessRequest({ pollIntervalMs: REQUEST_TIME }); + + await advanceTime(REQUEST_TIME); + expect(getSendRequestSpy().callCount).toBe(1); + + // We need to advance (1) the pollIntervalMs and (2) the request time. + await advanceTime(REQUEST_TIME * 2); + expect(getSendRequestSpy().callCount).toBe(2); + + // We need to advance (1) the pollIntervalMs and (2) the request time. + await advanceTime(REQUEST_TIME * 2); + expect(getSendRequestSpy().callCount).toBe(3); + }); + }); + + describe('initialData', () => { + it('sets the initial data value', async () => { + const { setupSuccessRequest, completeRequest, hookResult, getSuccessResponse } = helpers; + setupSuccessRequest({ initialData: 'initialData' }); + expect(hookResult.data).toBe('initialData'); + + // The initial data value will be overwritten once the request resolves. 
+ await completeRequest(); + expect(hookResult.data).toBe(getSuccessResponse().data); + }); + }); + + describe('deserializer', () => { + it('is called with the response once the request resolves', async () => { + const { setupSuccessRequest, completeRequest, getSuccessResponse } = helpers; + + const deserializer = sinon.stub(); + setupSuccessRequest({ deserializer }); + sinon.assert.notCalled(deserializer); + await completeRequest(); + + sinon.assert.calledOnce(deserializer); + sinon.assert.calledWith(deserializer, getSuccessResponse().data); + }); + + it('provides the data return value', async () => { + const { setupSuccessRequest, completeRequest, hookResult } = helpers; + setupSuccessRequest({ deserializer: () => 'intercepted' }); + await completeRequest(); + expect(hookResult.data).toBe('intercepted'); + }); + }); + }); + + describe('state', () => { + describe('isInitialRequest', () => { + it('is true for the first request and false for subsequent requests', async () => { + const { setupSuccessRequest, completeRequest, hookResult } = helpers; + setupSuccessRequest(); + expect(hookResult.isInitialRequest).toBe(true); + + hookResult.sendRequest(); + await completeRequest(); + expect(hookResult.isInitialRequest).toBe(false); + }); + }); + + describe('isLoading', () => { + it('represents in-flight request status', async () => { + const { setupSuccessRequest, completeRequest, hookResult } = helpers; + setupSuccessRequest(); + expect(hookResult.isLoading).toBe(true); + + await completeRequest(); + expect(hookResult.isLoading).toBe(false); + }); + }); + + describe('error', () => { + it('surfaces errors from requests', async () => { + const { setupErrorRequest, completeRequest, hookResult, getErrorResponse } = helpers; + setupErrorRequest(); + await completeRequest(); + expect(hookResult.error).toBe(getErrorResponse().error); + }); + + it('surfaces body-shaped errors from requests', async () => { + const { + setupErrorWithBodyRequest, + completeRequest, + hookResult, + getErrorWithBodyResponse, + } = helpers; + + setupErrorWithBodyRequest(); + await completeRequest(); + expect(hookResult.error).toBe(getErrorWithBodyResponse().error); + }); + + it('persists while a request is in-flight', async () => { + const { setupErrorRequest, completeRequest, hookResult, getErrorResponse } = helpers; + setupErrorRequest(); + await completeRequest(); + expect(hookResult.isLoading).toBe(false); + expect(hookResult.error).toBe(getErrorResponse().error); + + act(() => { + hookResult.sendRequest(); + }); + expect(hookResult.isLoading).toBe(true); + expect(hookResult.error).toBe(getErrorResponse().error); + }); + + it('is null when the request is successful', async () => { + const { setupSuccessRequest, completeRequest, hookResult } = helpers; + setupSuccessRequest(); + expect(hookResult.error).toBeNull(); + + await completeRequest(); + expect(hookResult.isLoading).toBe(false); + expect(hookResult.error).toBeNull(); + }); + }); + + describe('data', () => { + it('surfaces payloads from requests', async () => { + const { setupSuccessRequest, completeRequest, hookResult, getSuccessResponse } = helpers; + setupSuccessRequest(); + expect(hookResult.data).toBeUndefined(); + + await completeRequest(); + expect(hookResult.data).toBe(getSuccessResponse().data); + }); + + it('persists while a request is in-flight', async () => { + const { setupSuccessRequest, completeRequest, hookResult, getSuccessResponse } = helpers; + setupSuccessRequest(); + await completeRequest(); + expect(hookResult.isLoading).toBe(false); + 
expect(hookResult.data).toBe(getSuccessResponse().data); + + act(() => { + hookResult.sendRequest(); + }); + expect(hookResult.isLoading).toBe(true); + expect(hookResult.data).toBe(getSuccessResponse().data); + }); + + it('persists from last successful request when the next request fails', async () => { + const { + setupSuccessRequest, + completeRequest, + hookResult, + getErrorResponse, + setErrorResponse, + getSuccessResponse, + } = helpers; + + setupSuccessRequest(); + await completeRequest(); + expect(hookResult.isLoading).toBe(false); + expect(hookResult.error).toBeNull(); + expect(hookResult.data).toBe(getSuccessResponse().data); + + setErrorResponse(); + await completeRequest(); + expect(hookResult.isLoading).toBe(false); + expect(hookResult.error).toBe(getErrorResponse().error); + expect(hookResult.data).toBe(getSuccessResponse().data); + }); + }); + }); + + describe('callbacks', () => { + describe('sendRequest', () => { + it('sends the request', async () => { + const { setupSuccessRequest, completeRequest, hookResult, getSendRequestSpy } = helpers; + setupSuccessRequest(); + + await completeRequest(); + expect(getSendRequestSpy().callCount).toBe(1); + + await act(async () => { + hookResult.sendRequest(); + await completeRequest(); + }); + expect(getSendRequestSpy().callCount).toBe(2); + }); + + it('resets the pollIntervalMs', async () => { + const { setupSuccessRequest, advanceTime, hookResult, getSendRequestSpy } = helpers; + const DOUBLE_REQUEST_TIME = REQUEST_TIME * 2; + setupSuccessRequest({ pollIntervalMs: DOUBLE_REQUEST_TIME }); + + // The initial request resolves, and then we'll immediately send a new one manually... + await advanceTime(REQUEST_TIME); + expect(getSendRequestSpy().callCount).toBe(1); + act(() => { + hookResult.sendRequest(); + }); + + // The manual request resolves, and we'll send yet another one... + await advanceTime(REQUEST_TIME); + expect(getSendRequestSpy().callCount).toBe(2); + act(() => { + hookResult.sendRequest(); + }); + + // At this point, we've moved forward 3s. The poll is set at 2s. If sendRequest didn't + // reset the poll, the request call count would be 4, not 3. + await advanceTime(REQUEST_TIME); + expect(getSendRequestSpy().callCount).toBe(3); + }); + }); + }); + + describe('request behavior', () => { + it('outdated responses are ignored by poll requests', async () => { + const { + setupSuccessRequest, + setErrorResponse, + completeRequest, + hookResult, + getErrorResponse, + getSendRequestSpy, + } = helpers; + const DOUBLE_REQUEST_TIME = REQUEST_TIME * 2; + // Send initial request, which will have a longer round-trip time. + setupSuccessRequest({}, [DOUBLE_REQUEST_TIME]); + + // Send a new request, which will have a shorter round-trip time. + setErrorResponse(); + + // Complete both requests. + await completeRequest(); + + // Two requests were sent... + expect(getSendRequestSpy().callCount).toBe(2); + // ...but the error response is the one that takes precedence because it was *sent* more + // recently, despite the success response *returning* more recently. + expect(hookResult.error).toBe(getErrorResponse().error); + expect(hookResult.data).toBeUndefined(); + }); + + it(`outdated responses are ignored if there's a more recently-sent manual request`, async () => { + const { setupSuccessRequest, advanceTime, hookResult, getSendRequestSpy } = helpers; + + const HALF_REQUEST_TIME = REQUEST_TIME * 0.5; + setupSuccessRequest({ pollIntervalMs: REQUEST_TIME }); + + // Before the original request resolves, we make a manual sendRequest call. 
+      await advanceTime(HALF_REQUEST_TIME);
+      expect(getSendRequestSpy().callCount).toBe(0);
+      act(() => {
+        hookResult.sendRequest();
+      });
+
+      // The original request resolves, but it's been marked as outdated by the manual sendRequest
+      // call that "interrupted" it, so data is left undefined.
+      await advanceTime(HALF_REQUEST_TIME);
+      expect(getSendRequestSpy().callCount).toBe(1);
+      expect(hookResult.data).toBeUndefined();
+    });
+
+    it(`changing pollIntervalMs doesn't trigger a new request`, async () => {
+      const { setupErrorRequest, setErrorResponse, completeRequest, getSendRequestSpy } = helpers;
+      const DOUBLE_REQUEST_TIME = REQUEST_TIME * 2;
+      // Send initial request.
+      setupErrorRequest({ pollIntervalMs: REQUEST_TIME });
+
+      // Setting a new poll will schedule a second request, but not send one immediately.
+      setErrorResponse({ pollIntervalMs: DOUBLE_REQUEST_TIME });
+
+      // Complete initial request.
+      await completeRequest();
+
+      // Complete scheduled poll request.
+      await completeRequest();
+      expect(getSendRequestSpy().callCount).toBe(2);
+    });
+
+    it('when the path changes after a request is scheduled, the scheduled request is sent with that path', async () => {
+      const {
+        setupSuccessRequest,
+        completeRequest,
+        hookResult,
+        getErrorResponse,
+        setErrorResponse,
+        getSendRequestSpy,
+      } = helpers;
+      const DOUBLE_REQUEST_TIME = REQUEST_TIME * 2;
+
+      // Send the first request and schedule a request, both with the success path.
+      setupSuccessRequest({ pollIntervalMs: DOUBLE_REQUEST_TIME });
+
+      // Change the path to the error path, sending a second request. pollIntervalMs is the same
+      // so the originally scheduled poll remains scheduled.
+      setErrorResponse({ pollIntervalMs: DOUBLE_REQUEST_TIME });
+
+      // Complete the initial request, the request sent by the path change, and the scheduled poll request.
+      await completeRequest();
+      await completeRequest();
+
+      // If the scheduled poll request was sent to the success path, we wouldn't have an error result.
+      // But we do, because it was sent to the error path.
+      expect(getSendRequestSpy().callCount).toBe(3);
+      expect(hookResult.error).toBe(getErrorResponse().error);
+    });
+  });
+});
diff --git a/src/plugins/es_ui_shared/public/request/use_request.ts b/src/plugins/es_ui_shared/public/request/use_request.ts
new file mode 100644
index 0000000000000..481843bf40e88
--- /dev/null
+++ b/src/plugins/es_ui_shared/public/request/use_request.ts
@@ -0,0 +1,161 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
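For orientation, a minimal sketch of how a React component might consume the useRequest hook implemented in the file that follows. This snippet is not part of the change; the component name, endpoint, and polling interval are hypothetical, while the returned fields (isLoading, error, data, sendRequest) match the UseRequestResponse interface below.

import React from 'react';
import { HttpSetup } from 'src/core/public'; // path illustrative
import { useRequest } from './use_request'; // path illustrative

interface Props {
  httpClient: HttpSetup;
}

// Hypothetical component that polls a hypothetical endpoint every 10 seconds.
export const ItemsList = ({ httpClient }: Props) => {
  const { isLoading, error, data, sendRequest } = useRequest<{ items: string[] }>(httpClient, {
    method: 'get',
    path: '/api/my_plugin/items', // hypothetical endpoint
    pollIntervalMs: 10000,
  });

  if (isLoading && data === undefined) {
    return <div>Loading...</div>;
  }

  if (error) {
    // The hook keeps the last successful data; here we simply surface the failure.
    return <div>Request failed.</div>;
  }

  return (
    <div>
      <button onClick={() => sendRequest()}>Refresh</button>
      <ul>
        {(data?.items ?? []).map((item) => (
          <li key={item}>{item}</li>
        ))}
      </ul>
    </div>
  );
};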
+ */ + +import { useEffect, useCallback, useState, useRef, useMemo } from 'react'; + +import { HttpSetup } from '../../../../../src/core/public'; +import { + sendRequest as sendStatelessRequest, + SendRequestConfig, + SendRequestResponse, +} from './send_request'; + +export interface UseRequestConfig extends SendRequestConfig { + pollIntervalMs?: number; + initialData?: any; + deserializer?: (data: any) => any; +} + +export interface UseRequestResponse { + isInitialRequest: boolean; + isLoading: boolean; + error: E | null; + data?: D | null; + sendRequest: () => Promise>; +} + +export const useRequest = ( + httpClient: HttpSetup, + { path, method, query, body, pollIntervalMs, initialData, deserializer }: UseRequestConfig +): UseRequestResponse => { + const isMounted = useRef(false); + + // Main states for tracking request status and data + const [error, setError] = useState(null); + const [isLoading, setIsLoading] = useState(true); + const [data, setData] = useState(initialData); + + // Consumers can use isInitialRequest to implement a polling UX. + const requestCountRef = useRef(0); + const isInitialRequest = requestCountRef.current === 0; + const pollIntervalIdRef = useRef(null); + + const clearPollInterval = useCallback(() => { + if (pollIntervalIdRef.current) { + clearTimeout(pollIntervalIdRef.current); + pollIntervalIdRef.current = null; + } + }, []); + + // Convert our object to string to be able to compare them in our useMemo, + // allowing the consumer to freely passed new objects to the hook on each + // render without requiring them to be memoized. + const queryStringified = query ? JSON.stringify(query) : undefined; + const bodyStringified = body ? JSON.stringify(body) : undefined; + + const requestBody = useMemo(() => { + return { + path, + method, + query: queryStringified ? query : undefined, + body: bodyStringified ? body : undefined, + }; + // queryStringified and bodyStringified stand in for query and body as dependencies. + /* eslint-disable-next-line react-hooks/exhaustive-deps */ + }, [path, method, queryStringified, bodyStringified]); + + const sendRequest = useCallback(async () => { + // If we're on an interval, this allows us to reset it if the user has manually requested the + // data, to avoid doubled-up requests. + clearPollInterval(); + + const requestId = ++requestCountRef.current; + + // We don't clear error or data, so it's up to the consumer to decide whether to display the + // "old" error/data or loading state when a new request is in-flight. + setIsLoading(true); + + const response = await sendStatelessRequest(httpClient, requestBody); + const { data: serializedResponseData, error: responseError } = response; + + const isOutdatedRequest = requestId !== requestCountRef.current; + const isUnmounted = isMounted.current === false; + + // Ignore outdated or irrelevant data. + if (isOutdatedRequest || isUnmounted) { + return { data: null, error: null }; + } + + setError(responseError); + // If there's an error, keep the data from the last request in case it's still useful to the user. + if (!responseError) { + const responseData = deserializer + ? deserializer(serializedResponseData) + : serializedResponseData; + setData(responseData); + } + // Setting isLoading to false also acts as a signal for scheduling the next poll request. 
+ setIsLoading(false); + + return { data: serializedResponseData, error: responseError }; + }, [requestBody, httpClient, deserializer, clearPollInterval]); + + const scheduleRequest = useCallback(() => { + // If there's a scheduled poll request, this new one will supersede it. + clearPollInterval(); + + if (pollIntervalMs) { + pollIntervalIdRef.current = setTimeout(sendRequest, pollIntervalMs); + } + }, [pollIntervalMs, sendRequest, clearPollInterval]); + + // Send the request on component mount and whenever the dependencies of sendRequest() change. + useEffect(() => { + sendRequest(); + }, [sendRequest]); + + // Schedule the next poll request when the previous one completes. + useEffect(() => { + // When a request completes, attempt to schedule the next one. Note that we aren't re-scheduling + // a request whenever sendRequest's dependencies change. isLoading isn't set to false until the + // initial request has completed, so we won't schedule a request on mount. + if (!isLoading) { + scheduleRequest(); + } + }, [isLoading, scheduleRequest]); + + useEffect(() => { + isMounted.current = true; + + return () => { + isMounted.current = false; + + // Clean up on unmount. + clearPollInterval(); + }; + }, [clearPollInterval]); + + return { + isInitialRequest, + isLoading, + error, + data, + sendRequest, // Gives the user the ability to manually request data + }; +}; diff --git a/src/plugins/es_ui_shared/static/forms/components/fields/combobox_field.tsx b/src/plugins/es_ui_shared/static/forms/components/fields/combobox_field.tsx index 9fb804eb7fafa..b2f1a70341315 100644 --- a/src/plugins/es_ui_shared/static/forms/components/fields/combobox_field.tsx +++ b/src/plugins/es_ui_shared/static/forms/components/fields/combobox_field.tsx @@ -74,7 +74,7 @@ export const ComboBoxField = ({ field, euiFieldProps = {}, ...rest }: Props) => }; const onSearchComboChange = (value: string) => { - if (value) { + if (value !== undefined) { field.clearErrors(VALIDATION_TYPES.ARRAY_ITEM); } }; diff --git a/src/plugins/es_ui_shared/static/forms/components/fields/range_field.tsx b/src/plugins/es_ui_shared/static/forms/components/fields/range_field.tsx index e02c9b6c18615..9ffa7adace781 100644 --- a/src/plugins/es_ui_shared/static/forms/components/fields/range_field.tsx +++ b/src/plugins/es_ui_shared/static/forms/components/fields/range_field.tsx @@ -31,15 +31,16 @@ interface Props { export const RangeField = ({ field, euiFieldProps = {}, ...rest }: Props) => { const { isInvalid, errorMessage } = getFieldValidityAndErrorMessage(field); + const { onChange: onFieldChange } = field; const onChange = useCallback( (e: React.ChangeEvent | React.MouseEvent) => { const event = ({ ...e, value: `${e.currentTarget.value}` } as unknown) as React.ChangeEvent<{ value: string; }>; - field.onChange(event); + onFieldChange(event); }, - [field.onChange] + [onFieldChange] ); return ( diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form.tsx b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form.tsx index b3a15fea8b187..287ac56243446 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form.tsx +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form.tsx @@ -21,6 +21,7 @@ import React, { ReactNode } from 'react'; import { EuiForm } from '@elastic/eui'; import { FormProvider } from '../form_context'; +import { FormDataContextProvider } from '../form_data_context'; import { FormHook } from '../types'; interface Props { @@ -30,8 +31,14 @@ interface Props { [key: 
string]: any; } -export const Form = ({ form, FormWrapper = EuiForm, ...rest }: Props) => ( - - - -); +export const Form = ({ form, FormWrapper = EuiForm, ...rest }: Props) => { + const { getFormData, __getFormData$ } = form; + + return ( + + + + + + ); +}; diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.test.tsx b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.test.tsx index 25448dff18e8a..d9095944eaa33 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.test.tsx +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.test.tsx @@ -75,16 +75,7 @@ describe('', () => { setInputValue('lastNameField', 'updated value'); }); - /** - * The children will be rendered three times: - * - Twice for each input value that has changed - * - once because after updating both fields, the **form** isValid state changes (from "undefined" to "true") - * causing a new "form" object to be returned and thus a re-render. - * - * When the form object will be memoized (in a future PR), te bellow call count should only be 2 as listening - * to form data changes should not receive updates when the "isValid" state of the form changes. - */ - expect(onFormData.mock.calls.length).toBe(3); + expect(onFormData).toBeCalledTimes(2); const [formDataUpdated] = onFormData.mock.calls[onFormData.mock.calls.length - 1] as Parameters< OnUpdateHandler @@ -130,7 +121,7 @@ describe('', () => { find, } = setup() as TestBed; - expect(onFormData.mock.calls.length).toBe(0); // Not present in the DOM yet + expect(onFormData).toBeCalledTimes(0); // Not present in the DOM yet // Make some changes to the form fields await act(async () => { @@ -188,7 +179,7 @@ describe('', () => { setInputValue('lastNameField', 'updated value'); }); - expect(onFormData.mock.calls.length).toBe(0); + expect(onFormData).toBeCalledTimes(0); }); test('props.pathsToWatch (Array): should not re-render the children when the field that changed is not in the watch list', async () => { @@ -228,14 +219,14 @@ describe('', () => { }); // No re-render - expect(onFormData.mock.calls.length).toBe(0); + expect(onFormData).toBeCalledTimes(0); // Make some changes to fields in the watch list await act(async () => { setInputValue('nameField', 'updated value'); }); - expect(onFormData.mock.calls.length).toBe(1); + expect(onFormData).toBeCalledTimes(1); onFormData.mockReset(); diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.ts index 3630b902f0564..ac141baf8fc71 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.ts +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.ts @@ -17,10 +17,10 @@ * under the License. 
*/ -import React, { useState, useEffect, useRef, useCallback } from 'react'; +import React from 'react'; import { FormData } from '../types'; -import { useFormContext } from '../form_context'; +import { useFormData } from '../hooks'; interface Props { children: (formData: FormData) => JSX.Element | null; @@ -28,46 +28,9 @@ interface Props { } export const FormDataProvider = React.memo(({ children, pathsToWatch }: Props) => { - const form = useFormContext(); - const { subscribe } = form; - const previousRawData = useRef(form.__getFormData$().value); - const isMounted = useRef(false); - const [formData, setFormData] = useState(previousRawData.current); + const { 0: formData, 2: isReady } = useFormData({ watch: pathsToWatch }); - const onFormData = useCallback( - ({ data: { raw } }) => { - // To avoid re-rendering the children for updates on the form data - // that we are **not** interested in, we can specify one or multiple path(s) - // to watch. - if (pathsToWatch) { - const valuesToWatchArray = Array.isArray(pathsToWatch) - ? (pathsToWatch as string[]) - : ([pathsToWatch] as string[]); - - if (valuesToWatchArray.some((value) => previousRawData.current[value] !== raw[value])) { - previousRawData.current = raw; - setFormData(raw); - } - } else { - setFormData(raw); - } - }, - [pathsToWatch] - ); - - useEffect(() => { - const subscription = subscribe(onFormData); - return subscription.unsubscribe; - }, [subscribe, onFormData]); - - useEffect(() => { - isMounted.current = true; - return () => { - isMounted.current = false; - }; - }, []); - - if (!isMounted.current && Object.keys(formData).length === 0) { + if (!isReady) { // No field has mounted yet, don't render anything return null; } diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_array.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_array.ts index 3688421964d2e..d0ac0d4ab28c3 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_array.ts +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_array.ts @@ -17,18 +17,25 @@ * under the License. 
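For orientation, a sketch of the consumer-facing FormDataProvider API that the refactor above preserves while delegating its internals to the new useFormData hook. This snippet is not part of the change; the field paths, the surrounding form, and the import paths are assumptions.

import React from 'react';
// Import paths illustrative; the real exports live under hook_form_lib.
import { useForm, Form, UseField, FormDataProvider } from '../../hook_form_lib';

// Hypothetical form: an extra field is only rendered when "type" is set to "advanced".
// pathsToWatch limits re-renders of the children to changes of that field.
export const MyForm = () => {
  const { form } = useForm();

  return (
    <Form form={form}>
      <UseField path="type" />
      <FormDataProvider pathsToWatch="type">
        {({ type }) => (type === 'advanced' ? <UseField path="advancedSetting" /> : null)}
      </FormDataProvider>
    </Form>
  );
};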
*/ -import { useState, useEffect, useRef, useCallback } from 'react'; +import { useEffect, useRef, useCallback, useMemo } from 'react'; +import { FormHook, FieldConfig } from '../types'; +import { getFieldValidityAndErrorMessage } from '../helpers'; import { useFormContext } from '../form_context'; +import { useField, InternalFieldConfig } from '../hooks'; interface Props { path: string; initialNumberOfItems?: number; readDefaultValueOnForm?: boolean; + validations?: FieldConfig['validations']; children: (args: { items: ArrayItem[]; + error: string | null; addItem: () => void; removeItem: (id: number) => void; + moveItem: (sourceIdx: number, destinationIdx: number) => void; + form: FormHook; }) => JSX.Element; } @@ -56,32 +63,62 @@ export interface ArrayItem { export const UseArray = ({ path, initialNumberOfItems, + validations, readDefaultValueOnForm = true, children, }: Props) => { - const didMountRef = useRef(false); - const form = useFormContext(); - const defaultValues = readDefaultValueOnForm && (form.getFieldDefaultValue(path) as any[]); + const isMounted = useRef(false); const uniqueId = useRef(0); - const getInitialItemsFromValues = (values: any[]): ArrayItem[] => - values.map((_, index) => ({ + const form = useFormContext(); + const { getFieldDefaultValue } = form; + + const getNewItemAtIndex = useCallback( + (index: number): ArrayItem => ({ id: uniqueId.current++, path: `${path}[${index}]`, - isNew: false, - })); + isNew: true, + }), + [path] + ); + + const fieldDefaultValue = useMemo(() => { + const defaultValues = readDefaultValueOnForm + ? (getFieldDefaultValue(path) as any[]) + : undefined; - const getNewItemAtIndex = (index: number): ArrayItem => ({ - id: uniqueId.current++, - path: `${path}[${index}]`, - isNew: true, - }); + const getInitialItemsFromValues = (values: any[]): ArrayItem[] => + values.map((_, index) => ({ + id: uniqueId.current++, + path: `${path}[${index}]`, + isNew: false, + })); - const initialState = defaultValues - ? getInitialItemsFromValues(defaultValues) - : new Array(initialNumberOfItems).fill('').map((_, i) => getNewItemAtIndex(i)); + return defaultValues + ? getInitialItemsFromValues(defaultValues) + : new Array(initialNumberOfItems).fill('').map((_, i) => getNewItemAtIndex(i)); + }, [path, initialNumberOfItems, readDefaultValueOnForm, getFieldDefaultValue, getNewItemAtIndex]); - const [items, setItems] = useState(initialState); + // Create a new hook field with the "hasValue" set to false so we don't use its value to build the final form data. + // Apart from that the field behaves like a normal field and is hooked into the form validation lifecycle. + const fieldConfigBase: FieldConfig & InternalFieldConfig = { + defaultValue: fieldDefaultValue, + errorDisplayDelay: 0, + isIncludedInOutput: false, + }; + + const fieldConfig: FieldConfig & InternalFieldConfig = validations + ? 
{ validations, ...fieldConfigBase } + : fieldConfigBase; + + const field = useField(form, path, fieldConfig); + const { setValue, value, isChangingValue, errors } = field; + + // Derived state from the field + const error = useMemo(() => { + const { errorMessage } = getFieldValidityAndErrorMessage({ isChangingValue, errors }); + return errorMessage; + }, [isChangingValue, errors]); const updatePaths = useCallback( (_rows: ArrayItem[]) => { @@ -96,29 +133,51 @@ export const UseArray = ({ [path] ); - const addItem = () => { - setItems((previousItems) => { + const addItem = useCallback(() => { + setValue((previousItems) => { const itemIndex = previousItems.length; return [...previousItems, getNewItemAtIndex(itemIndex)]; }); - }; + }, [setValue, getNewItemAtIndex]); - const removeItem = (id: number) => { - setItems((previousItems) => { - const updatedItems = previousItems.filter((item) => item.id !== id); - return updatePaths(updatedItems); - }); - }; + const removeItem = useCallback( + (id: number) => { + setValue((previousItems) => { + const updatedItems = previousItems.filter((item) => item.id !== id); + return updatePaths(updatedItems); + }); + }, + [setValue, updatePaths] + ); - useEffect(() => { - if (didMountRef.current) { - setItems((prev) => { - return updatePaths(prev); + const moveItem = useCallback( + (sourceIdx: number, destinationIdx: number) => { + setValue((previousItems) => { + const nextItems = [...previousItems]; + const removed = nextItems.splice(sourceIdx, 1)[0]; + nextItems.splice(destinationIdx, 0, removed); + return updatePaths(nextItems); }); - } else { - didMountRef.current = true; + }, + [setValue, updatePaths] + ); + + useEffect(() => { + if (!isMounted.current) { + return; } - }, [path, updatePaths]); - return children({ items, addItem, removeItem }); + setValue((prev) => { + return updatePaths(prev); + }); + }, [path, updatePaths, setValue]); + + useEffect(() => { + isMounted.current = true; + return () => { + isMounted.current = false; + }; + }, []); + + return children({ items: value, error, form, addItem, removeItem, moveItem }); }; diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_field.test.tsx b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_field.test.tsx index a55b2f0a8fa29..c14471991ccd3 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_field.test.tsx +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_field.test.tsx @@ -64,6 +64,93 @@ describe('', () => { }); }); + describe('validation', () => { + let formHook: FormHook | null = null; + + beforeEach(() => { + formHook = null; + }); + + const onFormHook = (form: FormHook) => { + formHook = form; + }; + + const getTestComp = (fieldConfig: FieldConfig) => { + const TestComp = ({ onForm }: { onForm: (form: FormHook) => void }) => { + const { form } = useForm(); + + useEffect(() => { + onForm(form); + }, [onForm, form]); + + return ( +
    + + + ); + }; + return TestComp; + }; + + const setup = (fieldConfig: FieldConfig) => { + return registerTestBed(getTestComp(fieldConfig), { + memoryRouter: { wrapComponent: false }, + defaultProps: { onForm: onFormHook }, + })() as TestBed; + }; + + test('should update the form validity whenever the field value changes', async () => { + const fieldConfig: FieldConfig = { + defaultValue: '', // empty string, which is not valid + validations: [ + { + validator: ({ value }) => { + // Validate that string is not empty + if ((value as string).trim() === '') { + return { message: 'Error: field is empty.' }; + } + }, + }, + ], + }; + + // Mount our TestComponent + const { + form: { setInputValue }, + } = setup(fieldConfig); + + if (formHook === null) { + throw new Error('FormHook object has not been set.'); + } + + let { isValid } = formHook; + expect(isValid).toBeUndefined(); // Initially the form validity is undefined... + + await act(async () => { + await formHook!.validate(); // ...until we validate the form + }); + + ({ isValid } = formHook); + expect(isValid).toBe(false); + + // Change to a non empty string to pass validation + await act(async () => { + setInputValue('myField', 'changedValue'); + }); + + ({ isValid } = formHook); + expect(isValid).toBe(true); + + // Change back to an empty string to fail validation + await act(async () => { + setInputValue('myField', ''); + }); + + ({ isValid } = formHook); + expect(isValid).toBe(false); + }); + }); + describe('serializer(), deserializer(), formatter()', () => { interface MyForm { name: string; diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/form_data_context.tsx b/src/plugins/es_ui_shared/static/forms/hook_form_lib/form_data_context.tsx new file mode 100644 index 0000000000000..0e6a75e9c5065 --- /dev/null +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/form_data_context.tsx @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import React, { createContext, useContext, useMemo } from 'react'; + +import { FormData, FormHook } from './types'; +import { Subject } from './lib'; + +export interface Context { + getFormData$: () => Subject; + getFormData: FormHook['getFormData']; +} + +const FormDataContext = createContext | undefined>(undefined); + +interface Props extends Context { + children: React.ReactNode; +} + +export const FormDataContextProvider = ({ children, getFormData$, getFormData }: Props) => { + const value = useMemo( + () => ({ + getFormData, + getFormData$, + }), + [getFormData, getFormData$] + ); + + return {children}; +}; + +export function useFormDataContext() { + return useContext | undefined>(FormDataContext); +} diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/helpers.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/helpers.ts index 7ea42f81b43cb..a148dc543542b 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/helpers.ts +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/helpers.ts @@ -19,9 +19,10 @@ import { FieldHook } from './types'; -export const getFieldValidityAndErrorMessage = ( - field: FieldHook -): { isInvalid: boolean; errorMessage: string | null } => { +export const getFieldValidityAndErrorMessage = (field: { + isChangingValue: FieldHook['isChangingValue']; + errors: FieldHook['errors']; +}): { isInvalid: boolean; errorMessage: string | null } => { const isInvalid = !field.isChangingValue && field.errors.length > 0; const errorMessage = !field.isChangingValue && field.errors.length ? field.errors[0].message : null; diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/index.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/index.ts index 6a04a592227f9..aa9610dd85ae3 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/index.ts +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/index.ts @@ -17,5 +17,6 @@ * under the License. */ -export { useField } from './use_field'; +export { useField, InternalFieldConfig } from './use_field'; export { useForm } from './use_form'; +export { useFormData } from './use_form_data'; diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_field.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_field.ts index 9d22e4eb2ee5e..bb4aae6eccae8 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_field.ts +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_field.ts @@ -22,16 +22,22 @@ import { useMemo, useState, useEffect, useRef, useCallback } from 'react'; import { FormHook, FieldHook, FieldConfig, FieldValidateResponse, ValidationError } from '../types'; import { FIELD_TYPES, VALIDATION_TYPES } from '../constants'; +export interface InternalFieldConfig { + initialValue?: T; + isIncludedInOutput?: boolean; +} + export const useField = ( form: FormHook, path: string, - config: FieldConfig & { initialValue?: T } = {}, + config: FieldConfig & InternalFieldConfig = {}, valueChangeListener?: (value: T) => void ) => { const { type = FIELD_TYPES.TEXT, defaultValue = '', // The value to use a fallback mecanism when no initial value is passed initialValue = config.defaultValue ?? 
'', // The value explicitly passed + isIncludedInOutput = true, label = '', labelAppend = '', helpText = '', @@ -69,11 +75,12 @@ export const useField = ( const [isChangingValue, setIsChangingValue] = useState(false); const [isValidated, setIsValidated] = useState(false); + const isMounted = useRef(false); const validateCounter = useRef(0); const changeCounter = useRef(0); + const hasBeenReset = useRef(false); const inflightValidation = useRef | null>(null); const debounceTimeout = useRef(null); - const isMounted = useRef(false); // -- HELPERS // ---------------------------------- @@ -142,11 +149,7 @@ export const useField = ( __updateFormDataAt(path, value); // Validate field(s) (that will update form.isValid state) - // We only validate if the value is different than the initial or default value - // to avoid validating after a form.reset() call. - if (value !== initialValue && value !== defaultValue) { - await __validateFields(fieldsToValidateOnChange ?? [path]); - } + await __validateFields(fieldsToValidateOnChange ?? [path]); if (isMounted.current === false) { return; @@ -172,8 +175,6 @@ export const useField = ( }, [ path, value, - defaultValue, - initialValue, valueChangeListener, errorDisplayDelay, fieldsToValidateOnChange, @@ -206,7 +207,7 @@ export const useField = ( validationTypeToValidate, }: { formData: any; - value: unknown; + value: T; validationTypeToValidate?: string; }): ValidationError[] | Promise => { if (!validations) { @@ -239,7 +240,7 @@ export const useField = ( } inflightValidation.current = validator({ - value: (valueToValidate as unknown) as string, + value: valueToValidate, errors: validationErrors, form: { getFormData, getFields }, formData, @@ -254,6 +255,8 @@ export const useField = ( validationErrors.push({ ...validationResult, + // See comment below that explains why we add "__isBlocking__". + __isBlocking__: validationResult.__isBlocking__ ?? validation.isBlocking, validationType: validationType || VALIDATION_TYPES.FIELD, }); @@ -283,7 +286,7 @@ export const useField = ( } const validationResult = validator({ - value: (valueToValidate as unknown) as string, + value: valueToValidate, errors: validationErrors, form: { getFormData, getFields }, formData, @@ -306,6 +309,11 @@ export const useField = ( validationErrors.push({ ...(validationResult as ValidationError), + // We add an "__isBlocking__" property to know if this error is a blocker or no. + // Most validation errors are blockers but in some cases a validation is more a warning than an error + // like with the ComboBox items when they are added. + __isBlocking__: + (validationResult as ValidationError).__isBlocking__ ?? 
validation.isBlocking, validationType: validationType || VALIDATION_TYPES.FIELD, }); @@ -386,15 +394,27 @@ export const useField = ( */ const setValue: FieldHook['setValue'] = useCallback( (newValue) => { - const formattedValue = formatInputValue(newValue); - setStateValue(formattedValue); - return formattedValue; + setStateValue((prev) => { + let formattedValue: T; + if (typeof newValue === 'function') { + formattedValue = formatInputValue((newValue as Function)(prev)); + } else { + formattedValue = formatInputValue(newValue); + } + return formattedValue; + }); }, [formatInputValue] ); const _setErrors: FieldHook['setErrors'] = useCallback((_errors) => { - setErrors(_errors.map((error) => ({ validationType: VALIDATION_TYPES.FIELD, ...error }))); + setErrors( + _errors.map((error) => ({ + validationType: VALIDATION_TYPES.FIELD, + __isBlocking__: true, + ...error, + })) + ); }, []); /** @@ -455,6 +475,7 @@ export const useField = ( setErrors([]); if (resetValue) { + hasBeenReset.current = true; const newValue = deserializeValue(updatedDefaultValue ?? defaultValue); setValue(newValue); return newValue; @@ -463,7 +484,8 @@ export const useField = ( [setValue, deserializeValue, defaultValue] ); - const isValid = errors.length === 0; + // Don't take into account non blocker validation. Some are just warning (like trying to add a wrong ComboBox item) + const isValid = errors.filter((e) => e.__isBlocking__ !== false).length === 0; const field = useMemo>(() => { return { @@ -486,6 +508,7 @@ export const useField = ( clearErrors, validate, reset, + __isIncludedInOutput: isIncludedInOutput, __serializeValue: serializeValue, }; }, [ @@ -501,6 +524,7 @@ export const useField = ( isValidating, isValidated, isChangingValue, + isIncludedInOutput, onChange, getErrorsMessages, setValue, @@ -525,6 +549,13 @@ export const useField = ( }, [path, __removeField]); useEffect(() => { + // If the field value has been reset, we don't want to call the "onValueChange()" + // as it will set the "isPristine" state to true or validate the field, which initially we don't want. 
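// ---------------------------------------------------------------------------
// Editor's aside (illustrative sketch, not part of this patch). The setValue()
// change above lets callers pass a functional updater, mirroring React's
// setState. FieldHook is already imported in this file; the helper name, the
// string[] generic and the "tag" semantics are assumptions for the example.
const addTagToField = (field: FieldHook<string[]>, tag: string) => {
  // Derive the next value from the previous one instead of reading field.value,
  // which avoids stale closures inside memoized callbacks (see UseArray above).
  field.setValue((previousTags) => [...previousTags, tag]);
};
// Passing a plain value still works as before: field.setValue(['only-tag']);
// ---------------------------------------------------------------------------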
+ if (hasBeenReset.current) { + hasBeenReset.current = false; + return; + } + if (!isMounted.current) { return; } diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form.test.tsx b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form.test.tsx index 007e492243bac..edcd84daf5d2f 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form.test.tsx +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form.test.tsx @@ -22,7 +22,13 @@ import { act } from 'react-dom/test-utils'; import { registerTestBed, getRandomString, TestBed } from '../shared_imports'; import { Form, UseField } from '../components'; -import { FormSubmitHandler, OnUpdateHandler, FormHook, ValidationFunc } from '../types'; +import { + FormSubmitHandler, + OnUpdateHandler, + FormHook, + ValidationFunc, + FieldConfig, +} from '../types'; import { useForm } from './use_form'; interface MyForm { @@ -39,7 +45,7 @@ const onFormHook = (_form: FormHook) => { formHook = _form; }; -describe('use_form() hook', () => { +describe('useForm() hook', () => { beforeEach(() => { formHook = null; }); @@ -441,5 +447,57 @@ describe('use_form() hook', () => { deeply: { nested: { value: '' } }, // Fallback to empty string as no config was provided }); }); + + test('should not validate the fields after resetting its value (form validity should be undefined)', async () => { + const fieldConfig: FieldConfig = { + defaultValue: '', + validations: [ + { + validator: ({ value }) => { + if ((value as string).trim() === '') { + return { message: 'Error: empty string' }; + } + }, + }, + ], + }; + + const TestResetComp = () => { + const { form } = useForm(); + + useEffect(() => { + formHook = form; + }, [form]); + + return ( +
    + + + ); + }; + + const { + form: { setInputValue }, + } = registerTestBed(TestResetComp, { + memoryRouter: { wrapComponent: false }, + })() as TestBed; + + let { isValid } = formHook!; + expect(isValid).toBeUndefined(); + + await act(async () => { + setInputValue('myField', 'changedValue'); + }); + ({ isValid } = formHook!); + expect(isValid).toBe(true); + + await act(async () => { + // When we reset the form, value is back to "", which is invalid for the field + formHook!.reset(); + }); + + ({ isValid } = formHook!); + expect(isValid).toBeUndefined(); // Make sure it is "undefined" and not "false". + }); }); }); diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form.ts index 35bac5b9a58c6..b390c17d3c2ff 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form.ts +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form.ts @@ -95,19 +95,25 @@ export function useForm( const fieldsToArray = useCallback(() => Object.values(fieldsRefs.current), []); - const stripEmptyFields = useCallback( - (fields: FieldsMap): FieldsMap => { - if (formOptions.stripEmptyFields) { - return Object.entries(fields).reduce((acc, [key, field]) => { - if (typeof field.value !== 'string' || field.value.trim() !== '') { - acc[key] = field; - } + const getFieldsForOutput = useCallback( + (fields: FieldsMap, opts: { stripEmptyFields: boolean }): FieldsMap => { + return Object.entries(fields).reduce((acc, [key, field]) => { + if (!field.__isIncludedInOutput) { return acc; - }, {} as FieldsMap); - } - return fields; + } + + if (opts.stripEmptyFields) { + const isFieldEmpty = typeof field.value === 'string' && field.value.trim() === ''; + if (isFieldEmpty) { + return acc; + } + } + + acc[key] = field; + return acc; + }, {} as FieldsMap); }, - [formOptions] + [] ); const updateFormDataAt: FormHook['__updateFormDataAt'] = useCallback( @@ -133,8 +139,10 @@ export function useForm( const getFormData: FormHook['getFormData'] = useCallback( (getDataOptions: Parameters['getFormData']>[0] = { unflatten: true }) => { if (getDataOptions.unflatten) { - const nonEmptyFields = stripEmptyFields(fieldsRefs.current); - const fieldsValue = mapFormFields(nonEmptyFields, (field) => field.__serializeValue()); + const fieldsToOutput = getFieldsForOutput(fieldsRefs.current, { + stripEmptyFields: formOptions.stripEmptyFields, + }); + const fieldsValue = mapFormFields(fieldsToOutput, (field) => field.__serializeValue()); return serializer ? (serializer(unflattenObject(fieldsValue)) as T) : (unflattenObject(fieldsValue) as T); @@ -148,7 +156,7 @@ export function useForm( {} as T ); }, - [stripEmptyFields, serializer] + [getFieldsForOutput, formOptions.stripEmptyFields, serializer] ); const getErrors: FormHook['getErrors'] = useCallback(() => { @@ -240,6 +248,12 @@ export function useForm( if (!field.isValidated) { setIsValid(undefined); + + // When we submit the form (and set "isSubmitted" to "true"), we validate **all fields**. + // If a field is added and it is not validated it means that we have swapped fields and added new ones: + // --> we have basically have a new form in front of us. + // For that reason we make sure that the "isSubmitted" state is false. 
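// ---------------------------------------------------------------------------
// Editor's aside (illustrative sketch, not part of this patch). The
// validateAllFields() callback is exposed to consumers as form.validate()
// further down in this hunk. FormHook is already imported in this file; the
// helper name below and how its result is surfaced are assumptions.
const collectFormErrors = async (form: FormHook) => {
  // Runs every field validation, so form.isValid is no longer undefined.
  await form.validate();
  // Return whatever error messages remain so the caller can display them.
  return form.getErrors();
};
// ---------------------------------------------------------------------------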
+ setIsSubmitted(false); } }, [updateFormDataAt] @@ -389,6 +403,7 @@ export function useForm( isValid, id, submit: submitForm, + validate: validateAllFields, subscribe, setFieldValue, setFieldErrors, @@ -428,6 +443,7 @@ export function useForm( addField, removeField, validateFields, + validateAllFields, ]); useEffect(() => { diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form_data.test.tsx b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form_data.test.tsx new file mode 100644 index 0000000000000..0fb65daecf2f4 --- /dev/null +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form_data.test.tsx @@ -0,0 +1,234 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React, { useEffect } from 'react'; +import { act } from 'react-dom/test-utils'; + +import { registerTestBed, TestBed } from '../shared_imports'; +import { Form, UseField } from '../components'; +import { useForm } from './use_form'; +import { useFormData, HookReturn } from './use_form_data'; + +interface Props { + onChange(data: HookReturn): void; + watch?: string | string[]; +} + +describe('useFormData() hook', () => { + const HookListenerComp = React.memo(({ onChange, watch }: Props) => { + const hookValue = useFormData({ watch }); + + useEffect(() => { + onChange(hookValue); + }, [hookValue, onChange]); + + return null; + }); + + describe('form data updates', () => { + let testBed: TestBed; + let onChangeSpy: jest.Mock; + + const getLastMockValue = () => { + return onChangeSpy.mock.calls[onChangeSpy.mock.calls.length - 1][0] as HookReturn; + }; + + const TestComp = (props: Props) => { + const { form } = useForm(); + + return ( +
    + + + + ); + }; + + const setup = registerTestBed(TestComp, { + memoryRouter: { wrapComponent: false }, + }); + + beforeEach(() => { + onChangeSpy = jest.fn(); + testBed = setup({ onChange: onChangeSpy }) as TestBed; + }); + + test('should return the form data', () => { + // Called twice: + // once when the hook is called and once when the fields have mounted and updated the form data + expect(onChangeSpy).toBeCalledTimes(2); + const [data] = getLastMockValue(); + expect(data).toEqual({ title: 'titleInitialValue' }); + }); + + test('should listen to field changes', async () => { + const { + form: { setInputValue }, + } = testBed; + + await act(async () => { + setInputValue('titleField', 'titleChanged'); + }); + + expect(onChangeSpy).toBeCalledTimes(3); + const [data] = getLastMockValue(); + expect(data).toEqual({ title: 'titleChanged' }); + }); + }); + + describe('format form data', () => { + let onChangeSpy: jest.Mock; + + const getLastMockValue = () => { + return onChangeSpy.mock.calls[onChangeSpy.mock.calls.length - 1][0] as HookReturn; + }; + + const TestComp = (props: Props) => { + const { form } = useForm(); + + return ( +
    + + + + + ); + }; + + const setup = registerTestBed(TestComp, { + memoryRouter: { wrapComponent: false }, + }); + + beforeEach(() => { + onChangeSpy = jest.fn(); + setup({ onChange: onChangeSpy }); + }); + + test('should expose a handler to build the form data', () => { + const { 1: format } = getLastMockValue(); + expect(format()).toEqual({ + user: { + firstName: 'John', + lastName: 'Snow', + }, + }); + }); + }); + + describe('options', () => { + describe('watch', () => { + let testBed: TestBed; + let onChangeSpy: jest.Mock; + + const getLastMockValue = () => { + return onChangeSpy.mock.calls[onChangeSpy.mock.calls.length - 1][0] as HookReturn; + }; + + const TestComp = (props: Props) => { + const { form } = useForm(); + + return ( +
    + + + + + ); + }; + + const setup = registerTestBed(TestComp, { + memoryRouter: { wrapComponent: false }, + }); + + beforeEach(() => { + onChangeSpy = jest.fn(); + testBed = setup({ watch: 'title', onChange: onChangeSpy }) as TestBed; + }); + + test('should not listen to changes on fields we are not interested in', async () => { + const { + form: { setInputValue }, + } = testBed; + + await act(async () => { + // Changing a field we are **not** interested in + setInputValue('subTitleField', 'subTitleChanged'); + // Changing a field we **are** interested in + setInputValue('titleField', 'titleChanged'); + }); + + const [data] = getLastMockValue(); + expect(data).toEqual({ title: 'titleChanged', subTitle: 'subTitleInitialValue' }); + }); + }); + + describe('form', () => { + let testBed: TestBed; + let onChangeSpy: jest.Mock; + + const getLastMockValue = () => { + return onChangeSpy.mock.calls[onChangeSpy.mock.calls.length - 1][0] as HookReturn; + }; + + const TestComp = ({ onChange }: Props) => { + const { form } = useForm(); + const hookValue = useFormData({ form }); + + useEffect(() => { + onChange(hookValue); + }, [hookValue, onChange]); + + return ( +
    + + + ); + }; + + const setup = registerTestBed(TestComp, { + memoryRouter: { wrapComponent: false }, + }); + + beforeEach(() => { + onChangeSpy = jest.fn(); + testBed = setup({ onChange: onChangeSpy }) as TestBed; + }); + + test('should allow a form to be provided when the hook is called outside of the FormDataContext', async () => { + const { + form: { setInputValue }, + } = testBed; + + const [initialData] = getLastMockValue(); + expect(initialData).toEqual({ title: 'titleInitialValue' }); + + await act(async () => { + setInputValue('titleField', 'titleChanged'); + }); + + const [updatedData] = getLastMockValue(); + expect(updatedData).toEqual({ title: 'titleChanged' }); + }); + }); + }); +}); diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form_data.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form_data.ts new file mode 100644 index 0000000000000..fb4a0984438ad --- /dev/null +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/hooks/use_form_data.ts @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { useState, useEffect, useRef, useCallback } from 'react'; + +import { FormData, FormHook } from '../types'; +import { useFormDataContext, Context } from '../form_data_context'; + +interface Options { + watch?: string | string[]; + form?: FormHook; +} + +export type HookReturn = [FormData, () => T, boolean]; + +export const useFormData = (options: Options = {}): HookReturn => { + const { watch, form } = options; + const ctx = useFormDataContext(); + + let getFormData: Context['getFormData']; + let getFormData$: Context['getFormData$']; + + if (form !== undefined) { + getFormData = form.getFormData; + getFormData$ = form.__getFormData$; + } else if (ctx !== undefined) { + ({ getFormData, getFormData$ } = ctx); + } else { + throw new Error( + 'useFormData() must be used within a or you need to pass FormHook object in the options.' + ); + } + + const initialValue = getFormData$().value; + + const previousRawData = useRef(initialValue); + const isMounted = useRef(false); + const [formData, setFormData] = useState(previousRawData.current); + + const formatFormData = useCallback(() => { + return getFormData({ unflatten: true }); + }, [getFormData]); + + useEffect(() => { + const subscription = getFormData$().subscribe((raw) => { + if (watch) { + const valuesToWatchArray = Array.isArray(watch) + ? (watch as string[]) + : ([watch] as string[]); + + if (valuesToWatchArray.some((value) => previousRawData.current[value] !== raw[value])) { + previousRawData.current = raw; + // Only update the state if one of the field we watch has changed. 
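// ---------------------------------------------------------------------------
// Editor's aside (illustrative sketch, not part of this patch). Typical
// consumer usage of useFormData() with the "watch" option exercised by the
// tests earlier in this diff; the field path "title" and the wrapper hook
// name are made up for the example.
const useTitlePreview = () => {
  // Inside a <Form>, this only re-renders when the "title" field changes;
  // updates to any other field are ignored thanks to the watch filter below.
  const [formData, buildFormData] = useFormData({ watch: 'title' });
  return { title: formData.title, buildFormData };
};
// ---------------------------------------------------------------------------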
+ setFormData(raw); + } + } else { + setFormData(raw); + } + }); + return subscription.unsubscribe; + }, [getFormData$, watch]); + + useEffect(() => { + isMounted.current = true; + return () => { + isMounted.current = false; + }; + }, []); + + if (!isMounted.current && Object.keys(formData).length === 0) { + // No field has mounted yet + return [formData, formatFormData, false]; + } + + return [formData, formatFormData, true]; +}; diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/index.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/index.ts index 3079814c9ad14..8d6b57fbeb315 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/index.ts +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/index.ts @@ -19,7 +19,7 @@ // Only export the useForm hook. The "useField" hook is for internal use // as the consumer of the library must use the component -export { useForm } from './hooks'; +export { useForm, useFormData } from './hooks'; export { getFieldValidityAndErrorMessage } from './helpers'; export * from './form_context'; diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/types.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/types.ts index dc495f6eb56b4..18b8f478f7c0e 100644 --- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/types.ts +++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/types.ts @@ -30,6 +30,7 @@ export interface FormHook { readonly isValid: boolean | undefined; readonly id: string; submit: (e?: FormEvent | MouseEvent) => Promise<{ data: T; isValid: boolean }>; + validate: () => Promise; subscribe: (handler: OnUpdateHandler) => Subscription; setFieldValue: (fieldName: string, value: FieldValue) => void; setFieldErrors: (fieldName: string, errors: ValidationError[]) => void; @@ -107,15 +108,18 @@ export interface FieldHook { errorCode?: string; }) => string | null; onChange: (event: ChangeEvent<{ name?: string; value: string; checked?: boolean }>) => void; - setValue: (value: T) => T; + setValue: (value: T | ((prevValue: T) => T)) => void; setErrors: (errors: ValidationError[]) => void; clearErrors: (type?: string | string[]) => void; validate: (validateData?: { formData?: any; - value?: unknown; + value?: T; validationType?: string; }) => FieldValidateResponse | Promise; reset: (options?: { resetValue?: boolean; defaultValue?: T }) => unknown | undefined; + // Flag to indicate if the field value will be included in the form data outputted + // when calling form.getFormData(); + __isIncludedInOutput: boolean; __serializeValue: (rawValue?: unknown) => unknown; } @@ -126,7 +130,7 @@ export interface FieldConfig { readonly helpText?: string | ReactNode; readonly type?: HTMLInputElement['type']; readonly defaultValue?: ValueType; - readonly validations?: Array>; + readonly validations?: Array>; readonly formatters?: FormatterFunc[]; readonly deserializer?: SerializerFunc; readonly serializer?: SerializerFunc; @@ -147,6 +151,7 @@ export interface ValidationError { message: string; code?: T; validationType?: string; + __isBlocking__?: boolean; [key: string]: any; } @@ -161,8 +166,8 @@ export interface ValidationFuncArg { errors: readonly ValidationError[]; } -export type ValidationFunc = ( - data: ValidationFuncArg +export type ValidationFunc = ( + data: ValidationFuncArg ) => ValidationError | void | undefined | Promise | void | undefined>; export interface FieldValidateResponse { @@ -182,8 +187,14 @@ type FormatterFunc = (value: any, formData: FormData) => unknown; // string | number | boolean | string[] ... 
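// ---------------------------------------------------------------------------
// Editor's aside (illustrative sketch, not part of this patch). A field config
// combining a blocking validation with the new non-blocking flag declared on
// ValidationConfig just below; the field name, messages and threshold are made
// up for the example.
const tagsFieldConfig: FieldConfig = {
  defaultValue: [],
  validations: [
    {
      // Blocking by default: an empty list marks the field (and the form) invalid.
      validator: ({ value }) => {
        if ((value as string[]).length === 0) {
          return { message: 'Add at least one tag.' };
        }
      },
    },
    {
      // Non-blocking: the message is shown, but the field itself stays valid.
      isBlocking: false,
      validator: ({ value }) => {
        if ((value as string[]).length > 10) {
          return { message: 'More than 10 tags may be hard to read.' };
        }
      },
    },
  ],
};
// ---------------------------------------------------------------------------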
type FieldValue = unknown; -export interface ValidationConfig { - validator: ValidationFunc; +export interface ValidationConfig { + validator: ValidationFunc; type?: string; + /** + * By default all validation are blockers, which means that if they fail, the field is invalid. + * In some cases, like when trying to add an item to the ComboBox, if the item is not valid we want + * to show a validation error. But this validation is **not** blocking. Simply, the item has not been added. + */ + isBlocking?: boolean; exitOnFail?: boolean; } diff --git a/src/plugins/home/public/application/application.tsx b/src/plugins/home/public/application/application.tsx index 5d71bf8651d88..a4a158bc7dbde 100644 --- a/src/plugins/home/public/application/application.tsx +++ b/src/plugins/home/public/application/application.tsx @@ -47,6 +47,13 @@ export const renderApp = async ( chrome.setBreadcrumbs([{ text: homeTitle }]); + // dispatch synthetic hash change event to update hash history objects + // this is necessary because hash updates triggered by using popState won't trigger this event naturally. + // This must be called before the app is mounted to avoid call this after the redirect to default app logic kicks in + const unlisten = history.listen((location) => { + window.dispatchEvent(new HashChangeEvent('hashchange')); + }); + render( @@ -54,12 +61,6 @@ export const renderApp = async ( element ); - // dispatch synthetic hash change event to update hash history objects - // this is necessary because hash updates triggered by using popState won't trigger this event naturally. - const unlisten = history.listen(() => { - window.dispatchEvent(new HashChangeEvent('hashchange')); - }); - return () => { unmountComponentAtNode(element); unlisten(); diff --git a/src/plugins/home/public/application/components/tutorial/tutorial.js b/src/plugins/home/public/application/components/tutorial/tutorial.js index 8139bc6d38ab1..f55462bdc9e28 100644 --- a/src/plugins/home/public/application/components/tutorial/tutorial.js +++ b/src/plugins/home/public/application/components/tutorial/tutorial.js @@ -201,26 +201,18 @@ class TutorialUi extends React.Component { * @return {Promise} */ fetchEsHitsStatus = async (esHitsCheckConfig) => { - const searchHeader = JSON.stringify({ index: esHitsCheckConfig.index }); - const searchBody = JSON.stringify({ query: esHitsCheckConfig.query, size: 1 }); - const response = await fetch(this.props.addBasePath('/elasticsearch/_msearch'), { - method: 'post', - body: `${searchHeader}\n${searchBody}\n`, - headers: { - accept: 'application/json', - 'content-type': 'application/x-ndjson', - 'kbn-xsrf': 'kibana', - }, - credentials: 'same-origin', - }); - - if (response.status > 300) { + const { http } = getServices(); + try { + const response = await http.post('/api/home/hits_status', { + body: JSON.stringify({ + index: esHitsCheckConfig.index, + query: esHitsCheckConfig.query, + }), + }); + return response.count > 0 ? StatusCheckStates.HAS_DATA : StatusCheckStates.NO_DATA; + } catch (e) { return StatusCheckStates.ERROR; } - - const results = await response.json(); - const numHits = _.get(results, 'responses.[0].hits.hits.length', 0); - return numHits === 0 ? 
StatusCheckStates.NO_DATA : StatusCheckStates.HAS_DATA; }; renderInstructionSetsToggle = () => { diff --git a/src/plugins/home/server/plugin.test.ts b/src/plugins/home/server/plugin.test.ts index 33d907315e512..58103430b4d7c 100644 --- a/src/plugins/home/server/plugin.test.ts +++ b/src/plugins/home/server/plugin.test.ts @@ -19,10 +19,7 @@ import { registryForTutorialsMock, registryForSampleDataMock } from './plugin.test.mocks'; import { HomeServerPlugin } from './plugin'; -import { coreMock } from '../../../core/server/mocks'; -import { CoreSetup } from '../../../core/server'; - -type MockedKeys = { [P in keyof T]: jest.Mocked }; +import { coreMock, httpServiceMock } from '../../../core/server/mocks'; describe('HomeServerPlugin', () => { beforeEach(() => { @@ -33,8 +30,16 @@ describe('HomeServerPlugin', () => { }); describe('setup', () => { - const mockCoreSetup: MockedKeys = coreMock.createSetup(); - const initContext = coreMock.createPluginInitializerContext(); + let mockCoreSetup: ReturnType; + let initContext: ReturnType; + let routerMock: ReturnType; + + beforeEach(() => { + mockCoreSetup = coreMock.createSetup(); + routerMock = httpServiceMock.createRouter(); + mockCoreSetup.http.createRouter.mockReturnValue(routerMock); + initContext = coreMock.createPluginInitializerContext(); + }); test('wires up tutorials provider service and returns registerTutorial and addScopedTutorialContextFactory', () => { const setup = new HomeServerPlugin(initContext).setup(mockCoreSetup, {}); @@ -52,6 +57,18 @@ describe('HomeServerPlugin', () => { expect(setup.sampleData).toHaveProperty('addAppLinksToSampleDataset'); expect(setup.sampleData).toHaveProperty('replacePanelInSampleDatasetDashboard'); }); + + test('registers the `/api/home/hits_status` route', () => { + new HomeServerPlugin(initContext).setup(mockCoreSetup, {}); + + expect(routerMock.post).toHaveBeenCalledTimes(1); + expect(routerMock.post).toHaveBeenCalledWith( + expect.objectContaining({ + path: '/api/home/hits_status', + }), + expect.any(Function) + ); + }); }); describe('start', () => { diff --git a/src/plugins/home/server/plugin.ts b/src/plugins/home/server/plugin.ts index 1050c19362ae1..a2f8eec686b21 100644 --- a/src/plugins/home/server/plugin.ts +++ b/src/plugins/home/server/plugin.ts @@ -28,6 +28,7 @@ import { import { UsageCollectionSetup } from '../../usage_collection/server'; import { capabilitiesProvider } from './capabilities_provider'; import { sampleDataTelemetry } from './saved_objects'; +import { registerRoutes } from './routes'; interface HomeServerPluginSetupDependencies { usageCollection?: UsageCollectionSetup; @@ -41,6 +42,10 @@ export class HomeServerPlugin implements Plugin { + router.post( + { + path: '/api/home/hits_status', + validate: { + body: schema.object({ + index: schema.string(), + query: schema.recordOf(schema.string(), schema.any()), + }), + }, + }, + router.handleLegacyErrors(async (context, req, res) => { + const { index, query } = req.body; + const client = context.core.elasticsearch.client; + + try { + const { body } = await client.asCurrentUser.search({ + index, + size: 1, + body: { + query, + }, + }); + const count = body.hits.hits.length; + + return res.ok({ + body: { + count, + }, + }); + } catch (e) { + return res.badRequest({ + body: e, + }); + } + }) + ); +}; diff --git a/src/plugins/home/server/routes/index.ts b/src/plugins/home/server/routes/index.ts new file mode 100644 index 0000000000000..bf492051cdedf --- /dev/null +++ b/src/plugins/home/server/routes/index.ts @@ -0,0 +1,25 @@ +/* + * 
Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { IRouter } from 'src/core/server'; +import { registerHitsStatusRoute } from './fetch_es_hits_status'; + +export const registerRoutes = (router: IRouter) => { + registerHitsStatusRoute(router); +}; diff --git a/src/plugins/home/server/services/sample_data/data_sets/ecommerce/saved_objects.ts b/src/plugins/home/server/services/sample_data/data_sets/ecommerce/saved_objects.ts index 4c31ccee1243a..37657912deb95 100644 --- a/src/plugins/home/server/services/sample_data/data_sets/ecommerce/saved_objects.ts +++ b/src/plugins/home/server/services/sample_data/data_sets/ecommerce/saved_objects.ts @@ -248,11 +248,11 @@ export const getSavedObjects = (): SavedObject[] => [ version: '1', migrationVersion: {}, attributes: { - title: i18n.translate('home.sampleData.ecommerceSpec.averageSalesPerRegionTitle', { - defaultMessage: '[eCommerce] Average Sales Per Region', + title: i18n.translate('home.sampleData.ecommerceSpec.salesCountMapTitle', { + defaultMessage: '[eCommerce] Sales Count Map', }), visState: - '{"title":"[eCommerce] Average Sales Per Region","type":"region_map","params":{"legendPosition":"bottomright","addTooltip":true,"colorSchema":"Blues","selectedLayer":{"attribution":"
Made with NaturalEarth|Elastic Maps Service
    ","weight":1,"name":"World Countries","url":"https://vector.maps.elastic.co/blob/5659313586569216?elastic_tile_service_tos=agree&my_app_version=7.0.0-alpha1&license=f6c534b8-91b9-4499-8804-a2e9789ecc95","format":{"type":"geojson"},"fields":[{"name":"iso2","description":"Two letter abbreviation"},{"name":"name","description":"Country name"},{"name":"iso3","description":"Three letter abbreviation"}],"created_at":"2017-04-26T17:12:15.978370","tags":[],"id":5659313586569216,"layerId":"elastic_maps_service.World Countries","isEMS":true},"emsHotLink":"https://maps.elastic.co/v2#file/World Countries","selectedJoinField":{"name":"iso2","description":"Two letter abbreviation"},"isDisplayWarning":true,"wms":{"enabled":false,"options":{"format":"image/png","transparent":true}},"mapZoom":2,"mapCenter":[0,0],"outlineWeight":1,"showAllShapes":true},"aggs":[{"id":"1","enabled":true,"type":"avg","schema":"metric","params":{"field":"taxful_total_price","customLabel":"Average Sale"}},{"id":"2","enabled":true,"type":"terms","schema":"segment","params":{"field":"geoip.country_iso_code","size":100,"order":"desc","orderBy":"1","otherBucket":false,"otherBucketLabel":"Other","missingBucket":false,"missingBucketLabel":"Missing"}}]}', + '{"title":"[eCommerce] Sales Count Map","type":"vega","aggs":[],"params":{"spec":"{\\n $schema: https://vega.github.io/schema/vega/v5.json\\n config: {\\n kibana: {type: \\"map\\", latitude: 25, longitude: -40, zoom: 3}\\n }\\n data: [\\n {\\n name: table\\n url: {\\n index: kibana_sample_data_ecommerce\\n %context%: true\\n %timefield%: order_date\\n body: {\\n size: 0\\n aggs: {\\n gridSplit: {\\n geotile_grid: {field: \\"geoip.location\\", precision: 4, size: 10000}\\n aggs: {\\n gridCentroid: {\\n geo_centroid: {\\n field: \\"geoip.location\\"\\n }\\n }\\n }\\n }\\n }\\n }\\n }\\n format: {property: \\"aggregations.gridSplit.buckets\\"}\\n transform: [\\n {\\n type: geopoint\\n projection: projection\\n fields: [\\n gridCentroid.location.lon\\n gridCentroid.location.lat\\n ]\\n }\\n ]\\n }\\n ]\\n scales: [\\n {\\n name: gridSize\\n type: linear\\n domain: {data: \\"table\\", field: \\"doc_count\\"}\\n range: [\\n 50\\n 1000\\n ]\\n }\\n ]\\n marks: [\\n {\\n name: gridMarker\\n type: symbol\\n from: {data: \\"table\\"}\\n encode: {\\n update: {\\n size: {scale: \\"gridSize\\", field: \\"doc_count\\"}\\n xc: {signal: \\"datum.x\\"}\\n yc: {signal: \\"datum.y\\"}\\n }\\n }\\n },\\n {\\n name: gridLabel\\n type: text\\n from: {data: \\"table\\"}\\n encode: {\\n enter: {\\n fill: {value: \\"firebrick\\"}\\n text: {signal: \\"datum.doc_count\\"}\\n }\\n update: {\\n x: {signal: \\"datum.x\\"}\\n y: {signal: \\"datum.y\\"}\\n dx: {value: -6}\\n dy: {value: 6}\\n fontSize: {value: 18}\\n fontWeight: {value: \\"bold\\"}\\n }\\n }\\n }\\n ]\\n}"}}', uiStateJSON: '{}', description: '', version: 1, diff --git a/src/plugins/home/server/services/sample_data/data_sets/flights/saved_objects.ts b/src/plugins/home/server/services/sample_data/data_sets/flights/saved_objects.ts index c1f7fcb75b149..6f701d75e7d52 100644 --- a/src/plugins/home/server/services/sample_data/data_sets/flights/saved_objects.ts +++ b/src/plugins/home/server/services/sample_data/data_sets/flights/saved_objects.ts @@ -281,11 +281,10 @@ export const getSavedObjects = (): SavedObject[] => [ version: '1', migrationVersion: {}, attributes: { - title: i18n.translate('home.sampleData.flightsSpec.originCountryTicketPricesTitle', { - defaultMessage: '[Flights] Origin Country Ticket Prices', + title: 
i18n.translate('home.sampleData.flightsSpec.departuresCountMapTitle', { + defaultMessage: '[Flights] Departures Count Map', }), - visState: - '{"title":"[Flights] Origin Country Ticket Prices","type":"region_map","params":{"legendPosition":"bottomright","addTooltip":true,"colorSchema":"Blues","selectedLayer":{"attribution":"
Made with NaturalEarth | Elastic Maps Service
    ","name":"World Countries","weight":1,"format":{"type":"geojson"},"url":"https://vector.maps.elastic.co/blob/5659313586569216?elastic_tile_service_tos=agree&my_app_version=6.3.0&license=686f9ec6-d775-44f0-b334-38caf85da617","fields":[{"name":"iso2","description":"Two letter abbreviation"},{"name":"name","description":"Country name"},{"name":"iso3","description":"Three letter abbreviation"}],"created_at":"2017-04-26T17:12:15.978370","tags":[],"id":5659313586569216,"layerId":"elastic_maps_service.World Countries"},"selectedJoinField":{"name":"iso2","description":"Two letter abbreviation"},"isDisplayWarning":false,"wms":{"enabled":false,"options":{"format":"image/png","transparent":true},"baseLayersAreLoaded":{},"tmsLayers":[{"id":"road_map","url":"https://tiles.maps.elastic.co/v2/default/{z}/{x}/{y}.png?elastic_tile_service_tos=agree&my_app_name=kibana&my_app_version=6.3.0&license=686f9ec6-d775-44f0-b334-38caf85da617","minZoom":0,"maxZoom":18,"attribution":"
© OpenStreetMap contributors | Elastic Maps Service
    ","subdomains":[]}],"selectedTmsLayer":{"id":"road_map","url":"https://tiles.maps.elastic.co/v2/default/{z}/{x}/{y}.png?elastic_tile_service_tos=agree&my_app_name=kibana&my_app_version=6.3.0&license=686f9ec6-d775-44f0-b334-38caf85da617","minZoom":0,"maxZoom":18,"attribution":"
© OpenStreetMap contributors | Elastic Maps Service
    ","subdomains":[]}},"mapZoom":2,"mapCenter":[0,0],"outlineWeight":1,"showAllShapes":true},"aggs":[{"id":"1","enabled":true,"type":"avg","schema":"metric","params":{"field":"AvgTicketPrice"}},{"id":"2","enabled":true,"type":"terms","schema":"segment","params":{"field":"OriginCountry","size":100,"order":"desc","orderBy":"1","otherBucket":false,"otherBucketLabel":"Other","missingBucket":false,"missingBucketLabel":"Missing"}}]}', + visState: '{\"title\":\"[Flights] Departure Count Map\",\"type\":\"vega\",\"aggs\":[],\"params\":{\"spec\":\"{\\n $schema: https://vega.github.io/schema/vega/v5.json\\n config: {\\n kibana: {type: \\\"map\\\", latitude: 25, longitude: -40, zoom: 3}\\n }\\n data: [\\n {\\n name: table\\n url: {\\n index: kibana_sample_data_flights\\n %context%: true\\n %timefield%: timestamp\\n body: {\\n size: 0\\n aggs: {\\n gridSplit: {\\n geotile_grid: {field: \\\"OriginLocation\\\", precision: 4, size: 10000}\\n aggs: {\\n gridCentroid: {\\n geo_centroid: {\\n field: \\\"OriginLocation\\\"\\n }\\n }\\n }\\n }\\n }\\n }\\n }\\n format: {property: \\\"aggregations.gridSplit.buckets\\\"}\\n transform: [\\n {\\n type: geopoint\\n projection: projection\\n fields: [\\n gridCentroid.location.lon\\n gridCentroid.location.lat\\n ]\\n }\\n ]\\n }\\n ]\\n scales: [\\n {\\n name: gridSize\\n type: linear\\n domain: {data: \\\"table\\\", field: \\\"doc_count\\\"}\\n range: [\\n 50\\n 1000\\n ]\\n }\\n ]\\n marks: [\\n {\\n name: gridMarker\\n type: symbol\\n from: {data: \\\"table\\\"}\\n encode: {\\n update: {\\n size: {scale: \\\"gridSize\\\", field: \\\"doc_count\\\"}\\n xc: {signal: \\\"datum.x\\\"}\\n yc: {signal: \\\"datum.y\\\"}\\n tooltip: {\\n signal: \\\"{flights: datum.doc_count}\\\"\\n }\\n }\\n }\\n }\\n ]\\n}\"}}', uiStateJSON: '{}', description: '', version: 1, diff --git a/src/plugins/home/server/services/sample_data/data_sets/logs/saved_objects.ts b/src/plugins/home/server/services/sample_data/data_sets/logs/saved_objects.ts index 97258c21bc8f0..f8d39e6689fa8 100644 --- a/src/plugins/home/server/services/sample_data/data_sets/logs/saved_objects.ts +++ b/src/plugins/home/server/services/sample_data/data_sets/logs/saved_objects.ts @@ -51,11 +51,11 @@ export const getSavedObjects = (): SavedObject[] => [ version: '1', migrationVersion: {}, attributes: { - title: i18n.translate('home.sampleData.logsSpec.uniqueVisitorsByCountryTitle', { - defaultMessage: '[Logs] Unique Visitors by Country', + title: i18n.translate('home.sampleData.logsSpec.visitorsMapTitle', { + defaultMessage: '[Logs] Visitors Map', }), visState: - '{"title":"[Logs] Unique Visitors by Country","type":"region_map","params":{"legendPosition":"bottomright","addTooltip":true,"colorSchema":"Reds","selectedLayer":{"attribution":"
Made with NaturalEarth | Elastic Maps Service
    ","name":"World Countries","weight":1,"format":{"type":"geojson"},"url":"https://vector.maps.elastic.co/blob/5659313586569216?elastic_tile_service_tos=agree&my_app_version=6.2.3&license=77ab0ecf-a521-499d-bd52-fbd740bb81d0","fields":[{"name":"iso2","description":"Two letter abbreviation"},{"name":"name","description":"Country name"},{"name":"iso3","description":"Three letter abbreviation"}],"created_at":"2017-04-26T17:12:15.978370","tags":[],"id":5659313586569216,"layerId":"elastic_maps_service.World Countries"},"selectedJoinField":{"name":"iso2","description":"Two letter abbreviation"},"isDisplayWarning":false,"wms":{"enabled":false,"options":{"format":"image/png","transparent":true},"baseLayersAreLoaded":{},"tmsLayers":[{"id":"road_map","url":"https://tiles.maps.elastic.co/v2/default/{z}/{x}/{y}.png?elastic_tile_service_tos=agree&my_app_name=kibana&my_app_version=6.2.3&license=77ab0ecf-a521-499d-bd52-fbd740bb81d0","minZoom":0,"maxZoom":18,"attribution":"
© OpenStreetMap contributors | Elastic Maps Service
    ","subdomains":[]}],"selectedTmsLayer":{"id":"road_map","url":"https://tiles.maps.elastic.co/v2/default/{z}/{x}/{y}.png?elastic_tile_service_tos=agree&my_app_name=kibana&my_app_version=6.2.3&license=77ab0ecf-a521-499d-bd52-fbd740bb81d0","minZoom":0,"maxZoom":18,"attribution":"
© OpenStreetMap contributors | Elastic Maps Service
    ","subdomains":[]}},"mapZoom":2,"mapCenter":[0,0],"outlineWeight":1,"showAllShapes":true,"emsHotLink":null},"aggs":[{"id":"1","enabled":true,"type":"cardinality","schema":"metric","params":{"field":"clientip","customLabel":"Unique Visitors"}},{"id":"2","enabled":true,"type":"terms","schema":"segment","params":{"field":"geo.src","size":50,"order":"desc","orderBy":"1","otherBucket":false,"otherBucketLabel":"Other","missingBucket":false,"missingBucketLabel":"Missing"}}]}', + '{"title":"[Logs] Visitors Map","type":"vega","aggs":[],"params":{"spec":"{\\n $schema: https://vega.github.io/schema/vega/v5.json\\n config: {\\n kibana: {type: \\"map\\", latitude: 30, longitude: -120, zoom: 3}\\n }\\n data: [\\n {\\n name: table\\n url: {\\n index: kibana_sample_data_logs\\n %context%: true\\n %timefield%: timestamp\\n body: {\\n size: 0\\n aggs: {\\n gridSplit: {\\n geotile_grid: {field: \\"geo.coordinates\\", precision: 5, size: 10000}\\n aggs: {\\n gridCentroid: {\\n geo_centroid: {\\n field: \\"geo.coordinates\\"\\n }\\n }\\n }\\n }\\n }\\n }\\n }\\n format: {property: \\"aggregations.gridSplit.buckets\\"}\\n transform: [\\n {\\n type: geopoint\\n projection: projection\\n fields: [\\n gridCentroid.location.lon\\n gridCentroid.location.lat\\n ]\\n }\\n ]\\n }\\n ]\\n scales: [\\n {\\n name: gridSize\\n type: linear\\n domain: {data: \\"table\\", field: \\"doc_count\\"}\\n range: [\\n 50\\n 1000\\n ]\\n }\\n {\\n name: bubbleColor\\n type: linear\\n domain: {\\n data: table\\n field: doc_count\\n }\\n range: [\\"rgb(249, 234, 197)\\",\\"rgb(243, 200, 154)\\",\\"rgb(235, 166, 114)\\", \\"rgb(231, 102, 76)\\"]\\n }\\n ]\\n marks: [\\n {\\n name: gridMarker\\n type: symbol\\n from: {data: \\"table\\"}\\n encode: {\\n update: {\\n fill: {\\n scale: bubbleColor\\n field: doc_count\\n }\\n size: {scale: \\"gridSize\\", field: \\"doc_count\\"}\\n xc: {signal: \\"datum.x\\"}\\n yc: {signal: \\"datum.y\\"}\\n tooltip: {\\n signal: \\"{flights: datum.doc_count}\\"\\n }\\n }\\n }\\n }\\n ]\\n}"}}', uiStateJSON: '{}', description: '', version: 1, diff --git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/create_edit_field/create_edit_field.tsx b/src/plugins/index_pattern_management/public/components/edit_index_pattern/create_edit_field/create_edit_field.tsx index 22bc78ee0538e..13be9ca6c9c25 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/create_edit_field/create_edit_field.tsx +++ b/src/plugins/index_pattern_management/public/components/edit_index_pattern/create_edit_field/create_edit_field.tsx @@ -44,7 +44,7 @@ const newFieldPlaceholder = i18n.translate( export const CreateEditField = withRouter( ({ indexPattern, mode, fieldName, history }: CreateEditFieldProps) => { - const { uiSettings, chrome, notifications } = useKibana< + const { uiSettings, chrome, notifications, data } = useKibana< IndexPatternManagmentContext >().services; const spec = @@ -96,6 +96,7 @@ export const CreateEditField = withRouter( indexPattern={indexPattern} spec={spec} services={{ + saveIndexPattern: data.indexPatterns.save.bind(data.indexPatterns), redirectAway, }} /> diff --git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/edit_index_pattern.tsx b/src/plugins/index_pattern_management/public/components/edit_index_pattern/edit_index_pattern.tsx index a0eecef66ff93..d09836019b0bc 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/edit_index_pattern.tsx +++ 
b/src/plugins/index_pattern_management/public/components/edit_index_pattern/edit_index_pattern.tsx @@ -234,7 +234,13 @@ export const EditIndexPattern = withRouter( )} - +
  • ); diff --git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/__snapshots__/indexed_fields_table.test.tsx.snap b/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/__snapshots__/indexed_fields_table.test.tsx.snap index 47cabc4df662f..45253f6ad27c0 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/__snapshots__/indexed_fields_table.test.tsx.snap +++ b/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/__snapshots__/indexed_fields_table.test.tsx.snap @@ -15,13 +15,10 @@ exports[`IndexedFieldsTable should filter based on the query bar 1`] = ` "displayName": "Elastic", "excluded": false, "format": undefined, - "indexPattern": Object { - "getNonScriptedFields": [Function], - }, "info": Array [], "name": "Elastic", "searchable": true, - "type": "name", + "type": "string", }, ] } @@ -44,9 +41,6 @@ exports[`IndexedFieldsTable should filter based on the type filter 1`] = ` "displayName": "timestamp", "excluded": false, "format": undefined, - "indexPattern": Object { - "getNonScriptedFields": [Function], - }, "info": Array [], "name": "timestamp", "type": "date", @@ -72,21 +66,15 @@ exports[`IndexedFieldsTable should render normally 1`] = ` "displayName": "Elastic", "excluded": false, "format": undefined, - "indexPattern": Object { - "getNonScriptedFields": [Function], - }, "info": Array [], "name": "Elastic", "searchable": true, - "type": "name", + "type": "string", }, Object { "displayName": "timestamp", "excluded": false, "format": undefined, - "indexPattern": Object { - "getNonScriptedFields": [Function], - }, "info": Array [], "name": "timestamp", "type": "date", @@ -95,9 +83,6 @@ exports[`IndexedFieldsTable should render normally 1`] = ` "displayName": "conflictingField", "excluded": false, "format": undefined, - "indexPattern": Object { - "getNonScriptedFields": [Function], - }, "info": Array [], "name": "conflictingField", "type": "conflict", diff --git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/indexed_fields_table.test.tsx b/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/indexed_fields_table.test.tsx index 411bbe23e4761..319b9b2b3fce2 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/indexed_fields_table.test.tsx +++ b/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/indexed_fields_table.test.tsx @@ -19,7 +19,7 @@ import React from 'react'; import { shallow } from 'enzyme'; -import { IndexPatternField, IIndexPattern, IndexPattern } from 'src/plugins/data/public'; +import { IndexPatternField, IIndexPattern } from 'src/plugins/data/public'; import { IndexedFieldsTable } from './indexed_fields_table'; jest.mock('@elastic/eui', () => ({ @@ -47,10 +47,8 @@ const indexPattern = ({ const mockFieldToIndexPatternField = (spec: Record) => { return new IndexPatternField( - indexPattern as IndexPattern, (spec as unknown) as IndexPatternField['spec'], - spec.displayName as string, - () => {} + spec.displayName as string ); }; @@ -59,7 +57,7 @@ const fields = [ name: 'Elastic', displayName: 'Elastic', searchable: true, - type: 'name', + type: 'string', }, { name: 'timestamp', displayName: 'timestamp', type: 'date' }, { name: 'conflictingField', displayName: 'conflictingField', type: 'conflict' }, diff 
--git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/indexed_fields_table.tsx b/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/indexed_fields_table.tsx index 90f81a88b3da0..23977aac7fa7a 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/indexed_fields_table.tsx +++ b/src/plugins/index_pattern_management/public/components/edit_index_pattern/indexed_fields_table/indexed_fields_table.tsx @@ -77,7 +77,6 @@ export class IndexedFieldsTable extends Component< return { ...field.spec, displayName: field.displayName, - indexPattern: field.indexPattern, format: getFieldFormat(indexPattern, field.name), excluded: fieldWildcardMatch ? fieldWildcardMatch(field.name) : false, info: helpers.getFieldInfo && helpers.getFieldInfo(indexPattern, field), diff --git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/scripted_fields_table/scripted_field_table.test.tsx b/src/plugins/index_pattern_management/public/components/edit_index_pattern/scripted_fields_table/scripted_field_table.test.tsx index 80132167b7f58..84469a7e1fbd9 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/scripted_fields_table/scripted_field_table.test.tsx +++ b/src/plugins/index_pattern_management/public/components/edit_index_pattern/scripted_fields_table/scripted_field_table.test.tsx @@ -21,7 +21,7 @@ import React from 'react'; import { shallow } from 'enzyme'; import { ScriptedFieldsTable } from '../scripted_fields_table'; -import { IIndexPattern } from '../../../../../../plugins/data/common/index_patterns'; +import { IIndexPattern, IndexPattern } from '../../../../../../plugins/data/common/index_patterns'; jest.mock('@elastic/eui', () => ({ EuiTitle: 'eui-title', @@ -46,14 +46,6 @@ jest.mock('./components/table', () => ({ }, })); -jest.mock('ui/documentation_links', () => ({ - documentationLinks: { - scriptedFields: { - painless: 'painlessDocs', - }, - }, -})); - const helpers = { redirectToRoute: () => {}, getRouteHref: () => '#', @@ -62,7 +54,7 @@ const helpers = { const getIndexPatternMock = (mockedFields: any = {}) => ({ ...mockedFields } as IIndexPattern); describe('ScriptedFieldsTable', () => { - let indexPattern: IIndexPattern; + let indexPattern: IndexPattern; beforeEach(() => { indexPattern = getIndexPatternMock({ @@ -70,7 +62,7 @@ describe('ScriptedFieldsTable', () => { { name: 'ScriptedField', lang: 'painless', script: 'x++' }, { name: 'JustATest', lang: 'painless', script: 'z++' }, ], - }); + }) as IndexPattern; }); test('should render normally', async () => { @@ -79,6 +71,7 @@ describe('ScriptedFieldsTable', () => { indexPattern={indexPattern} helpers={helpers} painlessDocLink={'painlessDoc'} + saveIndexPattern={async () => {}} /> ); @@ -96,6 +89,7 @@ describe('ScriptedFieldsTable', () => { indexPattern={indexPattern} helpers={helpers} painlessDocLink={'painlessDoc'} + saveIndexPattern={async () => {}} /> ); @@ -113,15 +107,18 @@ describe('ScriptedFieldsTable', () => { test('should filter based on the lang filter', async () => { const component = shallow( [ - { name: 'ScriptedField', lang: 'painless', script: 'x++' }, - { name: 'JustATest', lang: 'painless', script: 'z++' }, - { name: 'Bad', lang: 'somethingElse', script: 'z++' }, - ], - })} + indexPattern={ + getIndexPatternMock({ + getScriptedFields: () => [ + { name: 'ScriptedField', lang: 'painless', script: 'x++' }, + { name: 'JustATest', lang: 'painless', 
script: 'z++' }, + { name: 'Bad', lang: 'somethingElse', script: 'z++' }, + ], + }) as IndexPattern + } painlessDocLink={'painlessDoc'} helpers={helpers} + saveIndexPattern={async () => {}} /> ); @@ -139,11 +136,14 @@ describe('ScriptedFieldsTable', () => { test('should hide the table if there are no scripted fields', async () => { const component = shallow( [], - })} + indexPattern={ + getIndexPatternMock({ + getScriptedFields: () => [], + }) as IndexPattern + } painlessDocLink={'painlessDoc'} helpers={helpers} + saveIndexPattern={async () => {}} /> ); @@ -161,6 +161,7 @@ describe('ScriptedFieldsTable', () => { indexPattern={indexPattern} helpers={helpers} painlessDocLink={'painlessDoc'} + saveIndexPattern={async () => {}} /> ); @@ -176,12 +177,15 @@ describe('ScriptedFieldsTable', () => { const removeScriptedField = jest.fn(); const component = shallow( {}} /> ); diff --git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/scripted_fields_table/scripted_fields_table.tsx b/src/plugins/index_pattern_management/public/components/edit_index_pattern/scripted_fields_table/scripted_fields_table.tsx index 532af2757915b..08cc90faf75fa 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/scripted_fields_table/scripted_fields_table.tsx +++ b/src/plugins/index_pattern_management/public/components/edit_index_pattern/scripted_fields_table/scripted_fields_table.tsx @@ -27,10 +27,10 @@ import { import { Table, Header, CallOuts, DeleteScritpedFieldConfirmationModal } from './components'; import { ScriptedFieldItem } from './types'; -import { IIndexPattern } from '../../../../../../plugins/data/public'; +import { IndexPattern, DataPublicPluginStart } from '../../../../../../plugins/data/public'; interface ScriptedFieldsTableProps { - indexPattern: IIndexPattern; + indexPattern: IndexPattern; fieldFilter?: string; scriptedFieldLanguageFilter?: string; helpers: { @@ -39,6 +39,7 @@ interface ScriptedFieldsTableProps { }; onRemoveField?: () => void; painlessDocLink: string; + saveIndexPattern: DataPublicPluginStart['indexPatterns']['save']; } interface ScriptedFieldsTableState { @@ -68,7 +69,7 @@ export class ScriptedFieldsTable extends Component< } fetchFields = async () => { - const fields = await this.props.indexPattern.getScriptedFields(); + const fields = await (this.props.indexPattern.getScriptedFields() as ScriptedFieldItem[]); const deprecatedLangsInUse = []; const deprecatedLangs = getDeprecatedScriptingLanguages(); @@ -121,10 +122,11 @@ export class ScriptedFieldsTable extends Component< }; deleteField = () => { - const { indexPattern, onRemoveField } = this.props; + const { indexPattern, onRemoveField, saveIndexPattern } = this.props; const { fieldToDelete } = this.state; - indexPattern.removeScriptedField(fieldToDelete); + indexPattern.removeScriptedField(fieldToDelete!.name); + saveIndexPattern(indexPattern); if (onRemoveField) { onRemoveField(); diff --git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/__snapshots__/source_filters_table.test.tsx.snap b/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/__snapshots__/source_filters_table.test.tsx.snap index a7b73624c4665..6a2b208c47987 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/__snapshots__/source_filters_table.test.tsx.snap +++ 
b/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/__snapshots__/source_filters_table.test.tsx.snap @@ -14,17 +14,6 @@ exports[`SourceFiltersTable should add a filter 1`] = ` fieldWildcardMatcher={[Function]} indexPattern={ Object { - "save": [MockFunction] { - "calls": Array [ - Array [], - ], - "results": Array [ - Object { - "type": "return", - "value": undefined, - }, - ], - }, "sourceFilters": Array [ Object { "value": "tim*", @@ -108,17 +97,6 @@ exports[`SourceFiltersTable should remove a filter 1`] = ` fieldWildcardMatcher={[Function]} indexPattern={ Object { - "save": [MockFunction] { - "calls": Array [ - Array [], - ], - "results": Array [ - Object { - "type": "return", - "value": undefined, - }, - ], - }, "sourceFilters": Array [ Object { "clientId": 2, @@ -279,17 +257,6 @@ exports[`SourceFiltersTable should update a filter 1`] = ` fieldWildcardMatcher={[Function]} indexPattern={ Object { - "save": [MockFunction] { - "calls": Array [ - Array [], - ], - "results": Array [ - Object { - "type": "return", - "value": undefined, - }, - ], - }, "sourceFilters": Array [ Object { "clientId": 1, diff --git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/source_filters_table.test.tsx b/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/source_filters_table.test.tsx index fa048af7c7a70..395e1f3744e94 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/source_filters_table.test.tsx +++ b/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/source_filters_table.test.tsx @@ -21,7 +21,7 @@ import React from 'react'; import { shallow } from 'enzyme'; import { SourceFiltersTable } from './source_filters_table'; -import { IIndexPattern } from 'src/plugins/data/public'; +import { IndexPattern } from 'src/plugins/data/public'; jest.mock('@elastic/eui', () => ({ EuiButton: 'eui-button', @@ -52,7 +52,7 @@ const getIndexPatternMock = (mockedFields: any = {}) => ({ sourceFilters: [{ value: 'time*' }, { value: 'nam*' }, { value: 'age*' }], ...mockedFields, - } as IIndexPattern); + } as IndexPattern); describe('SourceFiltersTable', () => { test('should render normally', () => { @@ -61,6 +61,7 @@ describe('SourceFiltersTable', () => { indexPattern={getIndexPatternMock()} fieldWildcardMatcher={() => {}} filterFilter={''} + saveIndexPattern={async () => {}} /> ); @@ -73,6 +74,7 @@ describe('SourceFiltersTable', () => { indexPattern={getIndexPatternMock()} fieldWildcardMatcher={() => {}} filterFilter={''} + saveIndexPattern={async () => {}} /> ); @@ -88,6 +90,7 @@ describe('SourceFiltersTable', () => { })} filterFilter={''} fieldWildcardMatcher={() => {}} + saveIndexPattern={async () => {}} /> ); @@ -98,11 +101,14 @@ describe('SourceFiltersTable', () => { test('should show a delete modal', () => { const component = shallow( {}} + saveIndexPattern={async () => {}} /> ); @@ -112,15 +118,17 @@ describe('SourceFiltersTable', () => { }); test('should remove a filter', async () => { - const save = jest.fn(); + const saveIndexPattern = jest.fn(async () => {}); const component = shallow( {}} + saveIndexPattern={saveIndexPattern} /> ); @@ -129,47 +137,49 @@ describe('SourceFiltersTable', () => { await component.instance().deleteFilter(); component.update(); // We are not calling `.setState` directly so we need to re-render - expect(save).toBeCalled(); + 
expect(saveIndexPattern).toBeCalled(); expect(component).toMatchSnapshot(); }); test('should add a filter', async () => { - const save = jest.fn(); + const saveIndexPattern = jest.fn(async () => {}); const component = shallow( {}} + saveIndexPattern={saveIndexPattern} /> ); await component.instance().onAddFilter('na*'); component.update(); // We are not calling `.setState` directly so we need to re-render - expect(save).toBeCalled(); + expect(saveIndexPattern).toBeCalled(); expect(component).toMatchSnapshot(); }); test('should update a filter', async () => { - const save = jest.fn(); + const saveIndexPattern = jest.fn(async () => {}); const component = shallow( {}} + saveIndexPattern={saveIndexPattern} /> ); await component.instance().saveFilter({ clientId: 'tim*', value: 'ti*' }); component.update(); // We are not calling `.setState` directly so we need to re-render - expect(save).toBeCalled(); + expect(saveIndexPattern).toBeCalled(); expect(component).toMatchSnapshot(); }); }); diff --git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/source_filters_table.tsx b/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/source_filters_table.tsx index e5c753886ea9f..b00648f124716 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/source_filters_table.tsx +++ b/src/plugins/index_pattern_management/public/components/edit_index_pattern/source_filters_table/source_filters_table.tsx @@ -22,14 +22,15 @@ import { createSelector } from 'reselect'; import { EuiSpacer } from '@elastic/eui'; import { AddFilter, Table, Header, DeleteFilterConfirmationModal } from './components'; -import { IIndexPattern } from '../../../../../../plugins/data/public'; +import { IndexPattern, DataPublicPluginStart } from '../../../../../../plugins/data/public'; import { SourceFiltersTableFilter } from './types'; export interface SourceFiltersTableProps { - indexPattern: IIndexPattern; + indexPattern: IndexPattern; filterFilter: string; fieldWildcardMatcher: Function; onAddOrRemoveFilter?: Function; + saveIndexPattern: DataPublicPluginStart['indexPatterns']['save']; } export interface SourceFiltersTableState { @@ -104,7 +105,7 @@ export class SourceFiltersTable extends Component< }; deleteFilter = async () => { - const { indexPattern, onAddOrRemoveFilter } = this.props; + const { indexPattern, onAddOrRemoveFilter, saveIndexPattern } = this.props; const { filterToDelete, filters } = this.state; indexPattern.sourceFilters = filters.filter((filter) => { @@ -112,7 +113,7 @@ export class SourceFiltersTable extends Component< }); this.setState({ isSaving: true }); - await indexPattern.save(); + await saveIndexPattern(indexPattern); if (onAddOrRemoveFilter) { onAddOrRemoveFilter(); @@ -124,12 +125,12 @@ export class SourceFiltersTable extends Component< }; onAddFilter = async (value: string) => { - const { indexPattern, onAddOrRemoveFilter } = this.props; + const { indexPattern, onAddOrRemoveFilter, saveIndexPattern } = this.props; indexPattern.sourceFilters = [...(indexPattern.sourceFilters || []), { value }]; this.setState({ isSaving: true }); - await indexPattern.save(); + await saveIndexPattern(indexPattern); if (onAddOrRemoveFilter) { onAddOrRemoveFilter(); @@ -140,7 +141,7 @@ export class SourceFiltersTable extends Component< }; saveFilter = async ({ clientId, value }: SourceFiltersTableFilter) => { - const { indexPattern } = this.props; + const { indexPattern, saveIndexPattern } = 
this.props; const { filters } = this.state; indexPattern.sourceFilters = filters.map((filter) => { @@ -155,7 +156,7 @@ export class SourceFiltersTable extends Component< }); this.setState({ isSaving: true }); - await indexPattern.save(); + await saveIndexPattern(indexPattern); this.updateFilters(); this.setState({ isSaving: false }); }; diff --git a/src/plugins/index_pattern_management/public/components/edit_index_pattern/tabs/tabs.tsx b/src/plugins/index_pattern_management/public/components/edit_index_pattern/tabs/tabs.tsx index 6c9d6db8de130..101399ef02b73 100644 --- a/src/plugins/index_pattern_management/public/components/edit_index_pattern/tabs/tabs.tsx +++ b/src/plugins/index_pattern_management/public/components/edit_index_pattern/tabs/tabs.tsx @@ -35,6 +35,7 @@ import { IndexPattern, IndexPatternField, UI_SETTINGS, + DataPublicPluginStart, } from '../../../../../../plugins/data/public'; import { useKibana } from '../../../../../../plugins/kibana_react/public'; import { IndexPatternManagmentContext } from '../../../types'; @@ -48,6 +49,7 @@ import { getTabs, getPath, convertToEuiSelectOption } from './utils'; interface TabsProps extends Pick { indexPattern: IndexPattern; fields: IndexPatternField[]; + saveIndexPattern: DataPublicPluginStart['indexPatterns']['save']; } const searchAriaLabel = i18n.translate( @@ -71,7 +73,7 @@ const filterPlaceholder = i18n.translate( } ); -export function Tabs({ indexPattern, fields, history, location }: TabsProps) { +export function Tabs({ indexPattern, saveIndexPattern, fields, history, location }: TabsProps) { const { uiSettings, indexPatternManagementStart, docLinks } = useKibana< IndexPatternManagmentContext >().services; @@ -176,7 +178,7 @@ export function Tabs({ indexPattern, fields, history, location }: TabsProps) { indexedFieldTypeFilter={indexedFieldTypeFilter} helpers={{ redirectToRoute: (field: IndexPatternField) => { - history.push(getPath(field)); + history.push(getPath(field, indexPattern)); }, getFieldInfo: indexPatternManagementStart.list.getFieldInfo, }} @@ -191,11 +193,12 @@ export function Tabs({ indexPattern, fields, history, location }: TabsProps) { { - history.push(getPath(field)); + history.push(getPath(field, indexPattern)); }, }} onRemoveField={refreshFilters} @@ -210,6 +213,7 @@ export function Tabs({ indexPattern, fields, history, location }: TabsProps) { {getFilterSection(type)} + + + } + onChange={[Function]} + /> + {!(format as DurationFormat).isHuman() ? ( - - } - isInvalid={!!error} - error={hasDecimalError ? error : null} - > - { - this.onChange({ outputPrecision: e.target.value ? Number(e.target.value) : null }); - }} + <> + + } isInvalid={!!error} - /> - + error={hasDecimalError ? error : null} + > + { + this.onChange({ + outputPrecision: e.target.value ? 
Number(e.target.value) : null, + }); + }} + isInvalid={!!error} + /> + + + + } + checked={Boolean(formatParams.showSuffix)} + onChange={(e) => { + this.onChange({ showSuffix: !formatParams.showSuffix }); + }} + /> + + ) : null} diff --git a/src/plugins/index_pattern_management/public/components/field_editor/field_editor.test.tsx b/src/plugins/index_pattern_management/public/components/field_editor/field_editor.test.tsx index 96d3fc549ece0..23f52475d413d 100644 --- a/src/plugins/index_pattern_management/public/components/field_editor/field_editor.test.tsx +++ b/src/plugins/index_pattern_management/public/components/field_editor/field_editor.test.tsx @@ -94,6 +94,8 @@ const field = { format: new Format(), }; +const services = { redirectAway: () => {}, saveIndexPattern: async () => {} }; + describe('FieldEditor', () => { let indexPattern: IndexPattern; @@ -122,7 +124,7 @@ describe('FieldEditor', () => { { indexPattern, spec: (field as unknown) as IndexPatternField, - services: { redirectAway: () => {} }, + services, }, mockContext ); @@ -138,12 +140,12 @@ describe('FieldEditor', () => { name: 'test', script: 'doc.test.value', }; - fieldList.push(testField as IndexPatternField); + fieldList.push((testField as unknown) as IndexPatternField); indexPattern.fields.getByName = (name) => { const flds = { [testField.name]: testField, }; - return flds[name] as IndexPatternField; + return (flds[name] as unknown) as IndexPatternField; }; const component = createComponentWithContext( @@ -151,7 +153,7 @@ describe('FieldEditor', () => { { indexPattern, spec: (testField as unknown) as IndexPatternField, - services: { redirectAway: () => {} }, + services, }, mockContext ); @@ -173,7 +175,7 @@ describe('FieldEditor', () => { const flds = { [testField.name]: testField, }; - return flds[name] as IndexPatternField; + return (flds[name] as unknown) as IndexPatternField; }; const component = createComponentWithContext( @@ -181,7 +183,7 @@ describe('FieldEditor', () => { { indexPattern, spec: (testField as unknown) as IndexPatternField, - services: { redirectAway: () => {} }, + services, }, mockContext ); @@ -198,7 +200,7 @@ describe('FieldEditor', () => { { indexPattern, spec: (testField as unknown) as IndexPatternField, - services: { redirectAway: () => {} }, + services, }, mockContext ); @@ -223,7 +225,7 @@ describe('FieldEditor', () => { { indexPattern, spec: (testField as unknown) as IndexPatternField, - services: { redirectAway: () => {} }, + services, }, mockContext ); diff --git a/src/plugins/index_pattern_management/public/components/field_editor/field_editor.tsx b/src/plugins/index_pattern_management/public/components/field_editor/field_editor.tsx index 6a3f632a9582e..4857a402cc4b2 100644 --- a/src/plugins/index_pattern_management/public/components/field_editor/field_editor.tsx +++ b/src/plugins/index_pattern_management/public/components/field_editor/field_editor.tsx @@ -133,6 +133,7 @@ export interface FieldEdiorProps { spec: IndexPatternField['spec']; services: { redirectAway: () => void; + saveIndexPattern: DataPublicPluginStart['indexPatterns']['save']; }; } @@ -757,23 +758,18 @@ export class FieldEditor extends PureComponent { - const { redirectAway } = this.props.services; + const { redirectAway, saveIndexPattern } = this.props.services; const { indexPattern } = this.props; const { spec } = this.state; - const remove = indexPattern.removeScriptedField(spec.name); - - if (remove) { - remove.then(() => { - const message = i18n.translate('indexPatternManagement.deleteField.deletedHeader', { - 
defaultMessage: "Deleted '{fieldName}'", - values: { fieldName: spec.name }, - }); - this.context.services.notifications.toasts.addSuccess(message); - redirectAway(); + indexPattern.removeScriptedField(spec.name); + saveIndexPattern(indexPattern).then(() => { + const message = i18n.translate('indexPatternManagement.deleteField.deletedHeader', { + defaultMessage: "Deleted '{fieldName}'", + values: { fieldName: spec.name }, }); - } else { + this.context.services.notifications.toasts.addSuccess(message); redirectAway(); - } + }); }; saveField = async () => { @@ -803,7 +799,7 @@ export class FieldEditor extends PureComponent { const message = i18n.translate('indexPatternManagement.deleteField.savedHeader', { defaultMessage: "Saved '{fieldName}'", diff --git a/src/plugins/input_control_vis/public/control/control.ts b/src/plugins/input_control_vis/public/control/control.ts index 91e8f1b26164b..da2dc7bab7cf7 100644 --- a/src/plugins/input_control_vis/public/control/control.ts +++ b/src/plugins/input_control_vis/public/control/control.ts @@ -81,9 +81,10 @@ export abstract class Control { abstract destroy(): void; format = (value: any) => { + const indexPattern = this.filterManager.getIndexPattern(); const field = this.filterManager.getField(); - if (field?.format?.convert) { - return field.format.convert(value); + if (field) { + return indexPattern.getFormatterForField(field).convert(value); } return value; diff --git a/src/plugins/kibana_legacy/public/angular/angular_config.tsx b/src/plugins/kibana_legacy/public/angular/angular_config.tsx index eafcbfda3db00..9dae615bc3848 100644 --- a/src/plugins/kibana_legacy/public/angular/angular_config.tsx +++ b/src/plugins/kibana_legacy/public/angular/angular_config.tsx @@ -27,12 +27,12 @@ import { } from 'angular'; import $ from 'jquery'; import { set } from '@elastic/safer-lodash-set'; -import { cloneDeep, forOwn, get } from 'lodash'; +import { get } from 'lodash'; import * as Rx from 'rxjs'; import { ChromeBreadcrumb, EnvironmentMode, PackageInfo } from 'kibana/public'; import { History } from 'history'; -import { CoreStart, LegacyCoreStart } from 'kibana/public'; +import { CoreStart } from 'kibana/public'; import { isSystemApiRequest } from '../utils'; import { formatAngularHttpError, isAngularHttpError } from '../notify/lib'; @@ -72,32 +72,18 @@ function isDummyRoute($route: any, isLocalAngular: boolean) { export const configureAppAngularModule = ( angularModule: IModule, - newPlatform: - | LegacyCoreStart - | { - core: CoreStart; - readonly env: { - mode: Readonly; - packageInfo: Readonly; - }; - }, + newPlatform: { + core: CoreStart; + readonly env: { + mode: Readonly; + packageInfo: Readonly; + }; + }, isLocalAngular: boolean, getHistory?: () => History ) => { const core = 'core' in newPlatform ? newPlatform.core : newPlatform; - const packageInfo = - 'env' in newPlatform - ? newPlatform.env.packageInfo - : newPlatform.injectedMetadata.getLegacyMetadata(); - - if ('injectedMetadata' in newPlatform) { - forOwn(newPlatform.injectedMetadata.getInjectedVars(), (val, name) => { - if (name !== undefined) { - // The legacy platform modifies some of these values, clone to an unfrozen object. 
- angularModule.value(name, cloneDeep(val)); - } - }); - } + const packageInfo = newPlatform.env.packageInfo; angularModule .value('kbnVersion', packageInfo.version) @@ -105,13 +91,7 @@ export const configureAppAngularModule = ( .value('buildSha', packageInfo.buildSha) .value('esUrl', getEsUrl(core)) .value('uiCapabilities', core.application.capabilities) - .config( - setupCompileProvider( - 'injectedMetadata' in newPlatform - ? newPlatform.injectedMetadata.getLegacyMetadata().devMode - : newPlatform.env.mode.dev - ) - ) + .config(setupCompileProvider(newPlatform.env.mode.dev)) .config(setupLocationProvider()) .config($setupXsrfRequestInterceptor(packageInfo.version)) .run(capture$httpLoadingCount(core)) diff --git a/src/plugins/kibana_react/public/field_button/field_button.tsx b/src/plugins/kibana_react/public/field_button/field_button.tsx index e5833b261946a..26e6453e4c48b 100644 --- a/src/plugins/kibana_react/public/field_button/field_button.tsx +++ b/src/plugins/kibana_react/public/field_button/field_button.tsx @@ -100,7 +100,16 @@ export function FieldButton({ return (
    -