diff --git a/NOTICE.txt b/NOTICE.txt
index 2341a478cbda9..4eec329b7a603 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -261,33 +261,6 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
----
-This product bundles childnode-remove which is available under a
-"MIT" license.
-
-The MIT License (MIT)
-
-Copyright (c) 2016-present, jszhou
-https://github.com/jserz/js_piece
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
---
This product bundles code based on probot-metadata@1.0.0 which is
available under a "MIT" license.
diff --git a/docs/developer/advanced/upgrading-nodejs.asciidoc b/docs/developer/advanced/upgrading-nodejs.asciidoc
index c1e727b1eac65..3827cb6e9aa7d 100644
--- a/docs/developer/advanced/upgrading-nodejs.asciidoc
+++ b/docs/developer/advanced/upgrading-nodejs.asciidoc
@@ -14,10 +14,14 @@ These files must be updated when upgrading Node.js:
- {kib-repo}blob/{branch}/.node-version[`.node-version`]
- {kib-repo}blob/{branch}/.nvmrc[`.nvmrc`]
- {kib-repo}blob/{branch}/package.json[`package.json`] - The version is specified in the `engines.node` field.
+ - {kib-repo}blob/{branch}/WORKSPACE.bazel[`WORKSPACE.bazel`] - The version is specified in the `node_version` property.
+ Besides this property, the list of files under `node_repositories` must be updated along with their respective SHA256 hashes.
+ These can be found on the https://nodejs.org[nodejs.org] website.
+ Example for Node.js v14.16.1: https://nodejs.org/dist/v14.16.1/SHASUMS256.txt.asc
-See PR {kib-repo}pull/86593[#86593] for an example of how the Node.js version has been upgraded previously.
+See PR {kib-repo}pull/96382[#96382] for an example of how the Node.js version has been upgraded previously.
-In the 6.8 branch, the `.ci/Dockerfile` file does not exist, so when upgrading Node.js in that branch, just skip that file.
+In the 6.8 branch, neither the `.ci/Dockerfile` file nor the `WORKSPACE.bazel` file exists, so when upgrading Node.js in that branch, just skip those files.
=== Backporting
diff --git a/docs/developer/plugin-list.asciidoc b/docs/developer/plugin-list.asciidoc
index 691d7fb82f3bc..0c40c2a8c4db9 100644
--- a/docs/developer/plugin-list.asciidoc
+++ b/docs/developer/plugin-list.asciidoc
@@ -485,6 +485,10 @@ Elastic.
|Welcome to the Kibana rollup plugin! This plugin provides Kibana support for Elasticsearch's rollup feature. Please refer to the Elasticsearch documentation to understand rollup indices and how to create rollup jobs.
+|{kib-repo}blob/{branch}/x-pack/plugins/rule_registry/README.md[ruleRegistry]
+|The rule registry plugin aims to make it easy for rule type producers to have their rules produce the data that they need to build rich experiences on top of a unified experience, without the risk of mapping conflicts.
+
+
|{kib-repo}blob/{branch}/x-pack/plugins/runtime_fields/README.md[runtimeFields]
|Welcome to the home of the runtime field editor and everything related to runtime fields!
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.autorefreshdonefn.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.autorefreshdonefn.md
new file mode 100644
index 0000000000000..a5694ea2d1af9
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.autorefreshdonefn.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AutoRefreshDoneFn](./kibana-plugin-plugins-data-public.autorefreshdonefn.md)
+
+## AutoRefreshDoneFn type
+
+Signature:
+
+```typescript
+export declare type AutoRefreshDoneFn = () => void;
+```
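A minimal usage sketch, assuming an observable that emits an `AutoRefreshDoneFn` on each auto-refresh tick (for example, the timefilter's `getAutoRefreshFetch$()`); the import paths and the source observable are assumptions, not part of the generated doc above:

```typescript
import type { Observable } from 'rxjs';
import type { AutoRefreshDoneFn } from 'src/plugins/data/public';

// Re-run a search on every auto-refresh tick and report completion afterwards,
// so the next tick is not scheduled before this one has finished.
export function subscribeToAutoRefresh(
  autoRefreshFetch$: Observable<AutoRefreshDoneFn>,
  reload: () => Promise<void>
) {
  return autoRefreshFetch$.subscribe(async (done) => {
    try {
      await reload();
    } finally {
      done(); // always signal completion, even if the reload failed
    }
  });
}
```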
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternselectprops.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternselectprops.md
index 80f4832ba5643..5cfd5e1bc9929 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternselectprops.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpatternselectprops.md
@@ -12,6 +12,5 @@ export declare type IndexPatternSelectProps = Required void;
- maxIndexPatterns?: number;
};
```
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.md
index 2473c9cfdde8d..cc0cb538be611 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.md
@@ -19,6 +19,7 @@ export interface ISearchOptions
| [isRestore](./kibana-plugin-plugins-data-public.isearchoptions.isrestore.md) | boolean | Whether the session is restored (i.e. search requests should re-use the stored search IDs, rather than starting from scratch) |
| [isStored](./kibana-plugin-plugins-data-public.isearchoptions.isstored.md) | boolean | Whether the session is already saved (i.e. sent to background) |
| [legacyHitsTotal](./kibana-plugin-plugins-data-public.isearchoptions.legacyhitstotal.md) | boolean | Request the legacy format for the total number of hits. If sending rest_total_hits_as_int to something other than true, this should be set to false. |
+| [requestResponder](./kibana-plugin-plugins-data-public.isearchoptions.requestresponder.md) | RequestResponder | |
| [sessionId](./kibana-plugin-plugins-data-public.isearchoptions.sessionid.md) | string | A session ID, grouping multiple search requests into a single session. |
| [strategy](./kibana-plugin-plugins-data-public.isearchoptions.strategy.md) | string | Use this option to force using a specific server side search strategy. Leave empty to use the default strategy. |
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.requestresponder.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.requestresponder.md
new file mode 100644
index 0000000000000..b4431b9467b71
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.isearchoptions.requestresponder.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [ISearchOptions](./kibana-plugin-plugins-data-public.isearchoptions.md) > [requestResponder](./kibana-plugin-plugins-data-public.isearchoptions.requestresponder.md)
+
+## ISearchOptions.requestResponder property
+
+Signature:
+
+```typescript
+requestResponder?: RequestResponder;
+```
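A usage sketch, assuming the responder comes from the inspector plugin's `RequestAdapter` (whose `start()` is assumed to return a `RequestResponder`); import paths are illustrative:

```typescript
import { RequestAdapter } from 'src/plugins/inspector/common';
import type { DataPublicPluginStart } from 'src/plugins/data/public';

// Run a search and let its lifecycle be reported through the provided responder.
export function searchWithResponder(data: DataPublicPluginStart, params: Record<string, any>) {
  const adapter = new RequestAdapter();
  const requestResponder = adapter.start('Example search'); // assumed signature: start(name, params?)
  return data.search.search({ params }, { requestResponder });
}
```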
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.kbn_field_types.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.kbn_field_types.md
index 4d75dda61d5c9..521ceeb1e37f2 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.kbn_field_types.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.kbn_field_types.md
@@ -27,6 +27,7 @@ export declare enum KBN_FIELD_TYPES
| HISTOGRAM | "histogram" | |
| IP | "ip" | |
| IP\_RANGE | "ip_range" | |
+| MISSING | "missing" | |
| MURMUR3 | "murmur3" | |
| NESTED | "nested" | |
| NUMBER | "number" | |
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md
index d2e7ef9db05e8..4429f45f55645 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md
@@ -47,6 +47,7 @@
| [getSearchParamsFromRequest(searchRequest, dependencies)](./kibana-plugin-plugins-data-public.getsearchparamsfromrequest.md) | |
| [getTime(indexPattern, timeRange, options)](./kibana-plugin-plugins-data-public.gettime.md) | |
| [plugin(initializerContext)](./kibana-plugin-plugins-data-public.plugin.md) | |
+| [waitUntilNextSessionCompletes$(sessionService, { waitForIdle })](./kibana-plugin-plugins-data-public.waituntilnextsessioncompletes_.md) | Creates an observable that emits when the next search session completes. This utility is helpful when an application needs to delay some tasks until the next session completes. |
## Interfaces
@@ -92,6 +93,7 @@
| [SearchInterceptorDeps](./kibana-plugin-plugins-data-public.searchinterceptordeps.md) | |
| [SearchSessionInfoProvider](./kibana-plugin-plugins-data-public.searchsessioninfoprovider.md) | Provide info about current search session to be stored in the Search Session saved object |
| [SearchSourceFields](./kibana-plugin-plugins-data-public.searchsourcefields.md) | search source fields |
+| [WaitUntilNextSessionCompletesOptions](./kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.md) | Options for [waitUntilNextSessionCompletes$()](./kibana-plugin-plugins-data-public.waituntilnextsessioncompletes_.md) |
## Variables
@@ -141,6 +143,7 @@
| [AggParam](./kibana-plugin-plugins-data-public.aggparam.md) | |
| [AggsStart](./kibana-plugin-plugins-data-public.aggsstart.md) | AggsStart represents the actual external contract as AggsCommonStart is only used internally. The difference is that AggsStart includes the typings for the registry with initialized agg types. |
| [AutocompleteStart](./kibana-plugin-plugins-data-public.autocompletestart.md) | \* |
+| [AutoRefreshDoneFn](./kibana-plugin-plugins-data-public.autorefreshdonefn.md) | |
| [CustomFilter](./kibana-plugin-plugins-data-public.customfilter.md) | |
| [EsaggsExpressionFunctionDefinition](./kibana-plugin-plugins-data-public.esaggsexpressionfunctiondefinition.md) | |
| [EsdslExpressionFunctionDefinition](./kibana-plugin-plugins-data-public.esdslexpressionfunctiondefinition.md) | |
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.search.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.search.md
index cfaad01c029ea..259009c1c5668 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.search.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.search.md
@@ -53,7 +53,6 @@ search: {
timeRange: import("../common").TimeRange | undefined;
} | undefined;
};
- getRequestInspectorStats: typeof getRequestInspectorStats;
getResponseInspectorStats: typeof getResponseInspectorStats;
tabifyAggResponse: typeof tabifyAggResponse;
tabifyGetColumns: typeof tabifyGetColumns;
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchbar.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchbar.md
index 7c7f2a53aca92..193a2e5a24f3f 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchbar.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchbar.md
@@ -7,7 +7,7 @@
Signature:
```typescript
-SearchBar: React.ComponentClass, "query" | "placeholder" | "isLoading" | "iconType" | "indexPatterns" | "filters" | "dataTestSubj" | "isClearable" | "isInvalid" | "storageKey" | "refreshInterval" | "nonKqlMode" | "nonKqlModeHelpText" | "screenTitle" | "disableLanguageSwitcher" | "autoSubmit" | "onRefresh" | "onRefreshChange" | "showQueryInput" | "showDatePicker" | "showAutoRefreshOnly" | "dateRangeFrom" | "dateRangeTo" | "isRefreshPaused" | "customSubmitButton" | "timeHistory" | "indicateNoData" | "onFiltersUpdated" | "savedQuery" | "showSaveQuery" | "onClearSavedQuery" | "showQueryBar" | "showFilterBar" | "onQueryChange" | "onQuerySubmit" | "onSaved" | "onSavedQueryUpdated">, any> & {
- WrappedComponent: React.ComponentType & ReactIntl.InjectedIntlProps>;
+SearchBar: React.ComponentClass, "query" | "placeholder" | "isLoading" | "iconType" | "indexPatterns" | "filters" | "dataTestSubj" | "isClearable" | "refreshInterval" | "nonKqlMode" | "nonKqlModeHelpText" | "screenTitle" | "onRefresh" | "onRefreshChange" | "showQueryInput" | "showDatePicker" | "showAutoRefreshOnly" | "dateRangeFrom" | "dateRangeTo" | "isRefreshPaused" | "customSubmitButton" | "timeHistory" | "indicateNoData" | "onFiltersUpdated" | "savedQuery" | "showSaveQuery" | "onClearSavedQuery" | "showQueryBar" | "showFilterBar" | "onQueryChange" | "onQuerySubmit" | "onSaved" | "onSavedQueryUpdated">, any> & {
+ WrappedComponent: React.ComponentType & ReactIntl.InjectedIntlProps>;
}
```
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchsource.getsearchrequestbody.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchsource.getsearchrequestbody.md
index cc50d3f017971..d384b9659dbcd 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchsource.getsearchrequestbody.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchsource.getsearchrequestbody.md
@@ -9,9 +9,9 @@ Returns body contents of the search request, often referred as query DSL.
Signature:
```typescript
-getSearchRequestBody(): Promise;
+getSearchRequestBody(): any;
```
Returns:
-`Promise`
+`any`
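A sketch of the updated call pattern, mirroring the example-app change later in this diff (`ISearchSource` and the import path are assumptions):

```typescript
import type { ISearchSource } from 'src/plugins/data/public';

export async function logAndFetch(searchSource: ISearchSource) {
  // getSearchRequestBody() is now synchronous, so the body can be inspected directly.
  const body = searchSource.getSearchRequestBody();
  console.log('query DSL', JSON.stringify(body, null, 2));

  // fetch$() returns an observable; convert it to a promise for one-shot usage.
  return searchSource.fetch$().toPromise();
}
```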
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.waituntilnextsessioncompletes_.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.waituntilnextsessioncompletes_.md
new file mode 100644
index 0000000000000..a4b294fb1decd
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.waituntilnextsessioncompletes_.md
@@ -0,0 +1,25 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [waitUntilNextSessionCompletes$](./kibana-plugin-plugins-data-public.waituntilnextsessioncompletes_.md)
+
+## waitUntilNextSessionCompletes$() function
+
+Creates an observable that emits when the next search session completes. This utility is helpful when an application needs to delay some tasks until the next session completes.
+
+Signature:
+
+```typescript
+export declare function waitUntilNextSessionCompletes$(sessionService: ISessionService, { waitForIdle }?: WaitUntilNextSessionCompletesOptions): import("rxjs").Observable;
+```
+
+## Parameters
+
+| Parameter | Type | Description |
+| --- | --- | --- |
+| sessionService | ISessionService | [ISessionService](./kibana-plugin-plugins-data-public.isessionservice.md) |
+| { waitForIdle } | WaitUntilNextSessionCompletesOptions | |
+
+Returns:
+
+`import("rxjs").Observable`
+
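A minimal usage sketch, assuming both exports are available from the data plugin's public contract and that `waitForIdle` is a duration in milliseconds:

```typescript
import { waitUntilNextSessionCompletes$ } from 'src/plugins/data/public';
import type { ISessionService } from 'src/plugins/data/public';

// Hide a loading indicator only once the next search session has fully completed.
export function hideSpinnerAfterSession(sessionService: ISessionService, hide: () => void) {
  return waitUntilNextSessionCompletes$(sessionService, { waitForIdle: 1000 }).subscribe(() => hide());
}
```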
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.md
new file mode 100644
index 0000000000000..d575722a22453
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.md
@@ -0,0 +1,20 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [WaitUntilNextSessionCompletesOptions](./kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.md)
+
+## WaitUntilNextSessionCompletesOptions interface
+
+Options for [waitUntilNextSessionCompletes$()](./kibana-plugin-plugins-data-public.waituntilnextsessioncompletes_.md)
+
+Signature:
+
+```typescript
+export interface WaitUntilNextSessionCompletesOptions
+```
+
+## Properties
+
+| Property | Type | Description |
+| --- | --- | --- |
+| [waitForIdle](./kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.waitforidle.md) | number | For how long to wait between session state transitions before considering that session completed |
+
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.waitforidle.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.waitforidle.md
new file mode 100644
index 0000000000000..60d3df7783852
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.waitforidle.md
@@ -0,0 +1,13 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [WaitUntilNextSessionCompletesOptions](./kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.md) > [waitForIdle](./kibana-plugin-plugins-data-public.waituntilnextsessioncompletesoptions.waitforidle.md)
+
+## WaitUntilNextSessionCompletesOptions.waitForIdle property
+
+For how long to wait between session state transitions before considering that session completed
+
+Signature:
+
+```typescript
+waitForIdle?: number;
+```
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md
index 7fd4dd5b8e566..413a59be3d427 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md
@@ -19,6 +19,7 @@ export interface ISearchOptions
| [isRestore](./kibana-plugin-plugins-data-server.isearchoptions.isrestore.md) | boolean | Whether the session is restored (i.e. search requests should re-use the stored search IDs, rather than starting from scratch) |
| [isStored](./kibana-plugin-plugins-data-server.isearchoptions.isstored.md) | boolean | Whether the session is already saved (i.e. sent to background) |
| [legacyHitsTotal](./kibana-plugin-plugins-data-server.isearchoptions.legacyhitstotal.md) | boolean | Request the legacy format for the total number of hits. If sending rest_total_hits_as_int to something other than true, this should be set to false. |
+| [requestResponder](./kibana-plugin-plugins-data-server.isearchoptions.requestresponder.md) | RequestResponder | |
| [sessionId](./kibana-plugin-plugins-data-server.isearchoptions.sessionid.md) | string | A session ID, grouping multiple search requests into a single session. |
| [strategy](./kibana-plugin-plugins-data-server.isearchoptions.strategy.md) | string | Use this option to force using a specific server side search strategy. Leave empty to use the default strategy. |
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.requestresponder.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.requestresponder.md
new file mode 100644
index 0000000000000..7440f5a9d26cf
--- /dev/null
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.requestresponder.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [ISearchOptions](./kibana-plugin-plugins-data-server.isearchoptions.md) > [requestResponder](./kibana-plugin-plugins-data-server.isearchoptions.requestresponder.md)
+
+## ISearchOptions.requestResponder property
+
+Signature:
+
+```typescript
+requestResponder?: RequestResponder;
+```
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.kbn_field_types.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.kbn_field_types.md
index be4c3705bd8de..40fa872ff0fc6 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.kbn_field_types.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.kbn_field_types.md
@@ -27,6 +27,7 @@ export declare enum KBN_FIELD_TYPES
| HISTOGRAM | "histogram" | |
| IP | "ip" | |
| IP\_RANGE | "ip_range" | |
+| MISSING | "missing" | |
| MURMUR3 | "murmur3" | |
| NESTED | "nested" | |
| NUMBER | "number" | |
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md
index 0911c3e86964d..930f7710f9a00 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md
@@ -36,8 +36,6 @@ search: {
toAbsoluteDates: typeof toAbsoluteDates;
calcAutoIntervalLessThan: typeof calcAutoIntervalLessThan;
};
- getRequestInspectorStats: typeof getRequestInspectorStats;
- getResponseInspectorStats: typeof getResponseInspectorStats;
tabifyAggResponse: typeof tabifyAggResponse;
tabifyGetColumns: typeof tabifyGetColumns;
}
diff --git a/docs/development/plugins/embeddable/public/kibana-plugin-plugins-embeddable-public.embeddable.getupdated_.md b/docs/development/plugins/embeddable/public/kibana-plugin-plugins-embeddable-public.embeddable.getupdated_.md
index 5201444e69867..290dc10662569 100644
--- a/docs/development/plugins/embeddable/public/kibana-plugin-plugins-embeddable-public.embeddable.getupdated_.md
+++ b/docs/development/plugins/embeddable/public/kibana-plugin-plugins-embeddable-public.embeddable.getupdated_.md
@@ -9,9 +9,9 @@ Merges input$ and output$ streams and debounces emit till next macro-task. Could
Signature:
```typescript
-getUpdated$(): Readonly>;
+getUpdated$(): Readonly>;
```
Returns:
-`Readonly>`
+`Readonly>`
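A usage sketch, assuming an `Embeddable` instance from the embeddable plugin (import path illustrative); `getUpdated$()` emits whenever input or output changes, debounced to the next macro-task:

```typescript
import type { Embeddable } from 'src/plugins/embeddable/public';

// React to any input/output change of the embeddable at most once per macro-task.
export function watchEmbeddable(embeddable: Embeddable) {
  return embeddable.getUpdated$().subscribe(() => {
    console.log('embeddable updated', embeddable.getInput(), embeddable.getOutput());
  });
}
```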
diff --git a/docs/getting-started/quick-start-guide.asciidoc b/docs/getting-started/quick-start-guide.asciidoc
index 1bdc9b9dea859..5e6a60f019bea 100644
--- a/docs/getting-started/quick-start-guide.asciidoc
+++ b/docs/getting-started/quick-start-guide.asciidoc
@@ -12,7 +12,7 @@ When you've finished, you'll know how to:
[float]
=== Required privileges
When security is enabled, you must have `read`, `write`, and `manage` privileges on the `kibana_sample_data_*` indices.
-For more information, refer to {ref}/security-privileges.html[Security privileges].
+Learn how to <>, or refer to {ref}/security-privileges.html[Security privileges] for more information.
[float]
[[set-up-on-cloud]]
@@ -141,3 +141,5 @@ For more information, refer to <>.
If you are ready to add your own data, refer to <>.
If you want to ingest your data, refer to {fleet-guide}/fleet-quick-start.html[Quick start: Get logs and metrics into the Elastic Stack].
+
+If you want to secure access to your data, refer to our guide on <>.
diff --git a/docs/management/advanced-options.asciidoc b/docs/management/advanced-options.asciidoc
index a9de1888465f7..02cb25078cc92 100644
--- a/docs/management/advanced-options.asciidoc
+++ b/docs/management/advanced-options.asciidoc
@@ -119,8 +119,12 @@ When date histograms use the `auto` interval, Kibana attempts to generate this
number of bars.
[[histogram-maxbars]]`histogram:maxBars`::
-Date histograms are not generated with more bars than the value of this property,
-scaling values when necessary.
+To improve performance, limits the density of date and number histograms across {kib}
+using a test query. When the test query contains too many buckets,
+the interval between buckets increases. This setting applies separately
+to each histogram aggregation, and does not apply to other types of aggregations.
+To find the maximum value of this setting, divide the {es} `search.max_buckets`
+value by the maximum number of aggregations in each visualization.
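As a worked example with illustrative numbers: if `search.max_buckets` is `10000` and the most complex visualization uses five histogram aggregations, `histogram:maxBars` should be set no higher than `2000` (10000 / 5).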
[[history-limit]]`history:limit`::
In fields that have history, such as query inputs, show this many recent values.
@@ -134,9 +138,7 @@ Fields that exist outside of `_source`. Kibana merges these fields into the
document when displaying it.
[[metrics-maxbuckets]]`metrics:max_buckets`::
-The maximum numbers of buckets that a single data source can return. This might
-arise when the user selects a short interval (for example, 1s) for a long time
-period (1 year).
+Affects the *TSVB* histogram density. Must be set higher than `histogram:maxBars`.
[[query-allowleadingwildcards]]`query:allowLeadingWildcards`::
Allows a wildcard (*) as the first character in a query clause. Only applies
diff --git a/docs/maps/trouble-shooting.asciidoc b/docs/maps/trouble-shooting.asciidoc
index 1c53fbd55ea4b..11aa636e0d852 100644
--- a/docs/maps/trouble-shooting.asciidoc
+++ b/docs/maps/trouble-shooting.asciidoc
@@ -44,10 +44,13 @@ Increase <> for large index patterns.
* Ensure fill color and border color are distinguishable from map tiles. It's hard to see white features on a white background.
[float]
-==== Tiles are not displayed
+==== Elastic Maps Service basemaps are not displayed
+*Maps* uses tile and vector data from Elastic Maps Service by default. See <> for more info.
-* Ensure your tile server has configured https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS[Cross-Origin Resource Sharing (CORS)] so tile requests from your Kibana domain have permission to access your tile server domain.
-* Ensure tiles have the required coordinate system. Vector data must use EPSG:4326 and tiles must use EPSG:3857.
+[float]
+==== Custom tiles are not displayed
+* When using a custom tile service, ensure your tile server has configured https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS[Cross-Origin Resource Sharing (CORS)] so tile requests from your {kib} domain have permission to access your tile server domain.
+* Ensure custom vector and tile services have the required coordinate system. Vector data must use EPSG:4326 and tiles must use EPSG:3857.
[float]
==== Coordinate and region map visualizations not available in New Visualization menu
diff --git a/docs/redirects.asciidoc b/docs/redirects.asciidoc
index e4d2b53a2d8d6..5d0242ae31950 100644
--- a/docs/redirects.asciidoc
+++ b/docs/redirects.asciidoc
@@ -286,3 +286,9 @@ This content has moved. See {ref}/ingest.html[Ingest pipelines].
== Timelion
This content has moved. Refer to <>.
+
+
+[role="exclude",id="space-rbac-tutorial"]
+== Tutorial: Use role-based access control to customize Kibana spaces
+
+This content has moved. Refer to <>.
diff --git a/docs/settings/alert-action-settings.asciidoc b/docs/settings/alert-action-settings.asciidoc
index 20bbbcf874c05..c748d63484e28 100644
--- a/docs/settings/alert-action-settings.asciidoc
+++ b/docs/settings/alert-action-settings.asciidoc
@@ -53,8 +53,12 @@ You can configure the following settings in the `kibana.yml` file.
+
Disabled action types will not appear as an option when creating new connectors, but existing connectors and actions of that type will remain in {kib} and will not function.
+| `xpack.actions`
+`.preconfiguredAlertHistoryEsIndex` {ess-icon}
+ | Enables a preconfigured alert history {es} <> connector. Defaults to `false`.
+
| `xpack.actions.preconfigured`
- | Specifies preconfigured action IDs and configs. Defaults to {}.
+ | Specifies preconfigured connector IDs and configs. Defaults to {}.
| `xpack.actions.proxyUrl` {ess-icon}
| Specifies the proxy URL to use, if using a proxy for actions. By default, no proxy is used.
diff --git a/docs/setup/settings.asciidoc b/docs/setup/settings.asciidoc
index 643718b961650..90e813afad6f4 100644
--- a/docs/setup/settings.asciidoc
+++ b/docs/setup/settings.asciidoc
@@ -429,6 +429,15 @@ to display map tiles in tilemap visualizations. By default,
override this parameter to use their own Tile Map Service. For example:
`"https://tiles.elastic.co/v2/default/{z}/{x}/{y}.png?elastic_tile_service_tos=agree&my_app_name=kibana"`
+| `migrations.batchSize:`
+ | Defines the number of documents migrated at a time. The higher the value, the faster the Saved Objects migration process performs at the cost of higher memory consumption. If the migration fails due to a `circuit_breaking_exception`, set a smaller `batchSize` value. *Default: `1000`*
+
+| `migrations.enableV2:`
+ | experimental[]. Enables the new Saved Objects migration algorithm. For information about the migration algorithm, refer to <>. When `migrations v2` is stable, the setting will be removed in an upcoming release without any further notice. Setting the value to `false` causes {kib} to use the legacy migration algorithm, which shipped in 7.11 and earlier versions. *Default: `true`*
+
+| `migrations.retryAttempts:`
+ | The number of times migrations retry temporary failures, such as a network timeout, 503 status code, or `snapshot_in_progress_exception`. When upgrade migrations frequently fail after exhausting all retry attempts with a message such as `Unable to complete the [...] step after 15 attempts, terminating.`, increase the setting value. *Default: `15`*
+
| `newsfeed.enabled:`
| Controls whether to enable the newsfeed
system for the {kib} UI notification center. Set to `false` to disable the
diff --git a/docs/user/alerting/action-types/index.asciidoc b/docs/user/alerting/action-types/index.asciidoc
index 80226e737e9c0..e23dcbf298fd5 100644
--- a/docs/user/alerting/action-types/index.asciidoc
+++ b/docs/user/alerting/action-types/index.asciidoc
@@ -82,3 +82,38 @@ PUT test
}
}
--------------------------------------------------
+
+[float]
+[[preconfigured-connector-alert-history]]
+=== Alert history {es} index connector
+
+experimental[] {kib} offers a preconfigured index connector to facilitate indexing active alert data into {es}.
+
+[WARNING]
+==================================================
+This functionality is experimental and may be changed or removed completely in a future release.
+==================================================
+
+To use this connector, set the <> configuration to `true`.
+
+```js
+ xpack.actions.preconfiguredAlertHistoryEsIndex: true
+```
+
+When creating a new rule, add an <> and select the `Alert history Elasticsearch index (preconfigured)` connector.
+
+[role="screenshot"]
+image::images/pre-configured-alert-history-connector.png[Select pre-configured alert history connectors]
+
+Documents are indexed using a preconfigured schema that captures the <> available for the rule. By default, these documents are indexed into the `kibana-alert-history-default` index, but you can specify a different index. Index names must start with `kibana-alert-history-` to take advantage of the preconfigured alert history index template.
+
+[IMPORTANT]
+==============================================
+To write documents to the preconfigured index, you must have `all` or `write` privileges to the `kibana-alert-history-*` indices. Refer to <> for more information.
+==============================================
+
+[NOTE]
+==================================================
+The `kibana-alert-history-*` indices are not configured to use ILM so they must be maintained manually. If the index size grows large,
+consider using the {ref}/docs-delete-by-query.html[delete by query] API to clean up older documents in the index.
+==================================================
\ No newline at end of file
diff --git a/docs/user/alerting/action-types/pre-configured-connectors.asciidoc b/docs/user/alerting/action-types/pre-configured-connectors.asciidoc
index ee8a28a864824..557404f24288a 100644
--- a/docs/user/alerting/action-types/pre-configured-connectors.asciidoc
+++ b/docs/user/alerting/action-types/pre-configured-connectors.asciidoc
@@ -51,6 +51,14 @@ two out-of-the-box connectors: <> and <>.
==============================================
+[float]
+[[build-in-preconfigured-connectors]]
+==== Built-in preconfigured connectors
+
+{kib} provides one built-in preconfigured connector:
+
+* <>
+
[float]
[[managing-pre-configured-connectors]]
==== View preconfigured connectors
@@ -63,4 +71,4 @@ image::images/pre-configured-connectors-managing.png[Connectors managing tab wit
Clicking a preconfigured connector shows the description, but not the configuration. A message indicates that this is a preconfigured connector.
[role="screenshot"]
-image::images/pre-configured-connectors-view-screen.png[Pre-configured connector view details]
+image::images/pre-configured-connectors-view-screen.png[Pre-configured connector view details]
\ No newline at end of file
diff --git a/docs/user/alerting/images/pre-configured-alert-history-connector.png b/docs/user/alerting/images/pre-configured-alert-history-connector.png
new file mode 100644
index 0000000000000..35f9b19710cda
Binary files /dev/null and b/docs/user/alerting/images/pre-configured-alert-history-connector.png differ
diff --git a/docs/user/security/authentication/index.asciidoc b/docs/user/security/authentication/index.asciidoc
index a4acc93310e5d..805ae924a599e 100644
--- a/docs/user/security/authentication/index.asciidoc
+++ b/docs/user/security/authentication/index.asciidoc
@@ -292,7 +292,11 @@ xpack.security.authc.providers:
order: 1
-----------------------------------------------
-Kibana uses SPNEGO, which wraps the Kerberos protocol for use with HTTP, extending it to web applications. At the end of the Kerberos handshake, Kibana will forward the service ticket to Elasticsearch. Elasticsearch will unpack it and it will respond with an access and refresh token which are then used for subsequent authentication.
+IMPORTANT: {kib} uses SPNEGO, which wraps the Kerberos protocol for use with HTTP, extending it to web applications.
+At the end of the Kerberos handshake, {kib} forwards the service ticket to {es}, then {es} unpacks the service ticket and responds with an access and refresh token, which are used for subsequent authentication.
+On every {es} node that {kib} connects to, the keytab file should always contain the HTTP service principal for the {kib} host.
+The HTTP service principal name must have the `HTTP/kibana.domain.local@KIBANA.DOMAIN.LOCAL` format.
+
[[anonymous-authentication]]
==== Anonymous authentication
diff --git a/docs/user/security/images/role-index-privilege.png b/docs/user/security/images/role-index-privilege.png
deleted file mode 100644
index 1dc1ae640e3ba..0000000000000
Binary files a/docs/user/security/images/role-index-privilege.png and /dev/null differ
diff --git a/docs/user/security/images/role-management.png b/docs/user/security/images/role-management.png
deleted file mode 100644
index 29efdd85c4df3..0000000000000
Binary files a/docs/user/security/images/role-management.png and /dev/null differ
diff --git a/docs/user/security/images/role-new-user.png b/docs/user/security/images/role-new-user.png
deleted file mode 100644
index c882eeea42d60..0000000000000
Binary files a/docs/user/security/images/role-new-user.png and /dev/null differ
diff --git a/docs/user/security/images/role-space-visualization.png b/docs/user/security/images/role-space-visualization.png
deleted file mode 100644
index 36f83f09f064b..0000000000000
Binary files a/docs/user/security/images/role-space-visualization.png and /dev/null differ
diff --git a/docs/user/security/images/tutorial-secure-access-example-1-role.png b/docs/user/security/images/tutorial-secure-access-example-1-role.png
new file mode 100644
index 0000000000000..53540da7170ea
Binary files /dev/null and b/docs/user/security/images/tutorial-secure-access-example-1-role.png differ
diff --git a/docs/user/security/images/tutorial-secure-access-example-1-space.png b/docs/user/security/images/tutorial-secure-access-example-1-space.png
new file mode 100644
index 0000000000000..a48fdeaa6efa1
Binary files /dev/null and b/docs/user/security/images/tutorial-secure-access-example-1-space.png differ
diff --git a/docs/user/security/images/tutorial-secure-access-example-1-test.png b/docs/user/security/images/tutorial-secure-access-example-1-test.png
new file mode 100644
index 0000000000000..305b97017a9d8
Binary files /dev/null and b/docs/user/security/images/tutorial-secure-access-example-1-test.png differ
diff --git a/docs/user/security/images/tutorial-secure-access-example-1-user.png b/docs/user/security/images/tutorial-secure-access-example-1-user.png
new file mode 100644
index 0000000000000..8df26cf28ef16
Binary files /dev/null and b/docs/user/security/images/tutorial-secure-access-example-1-user.png differ
diff --git a/docs/user/security/index.asciidoc b/docs/user/security/index.asciidoc
index 6a5c4a83aa3ad..71c5bd268a67d 100644
--- a/docs/user/security/index.asciidoc
+++ b/docs/user/security/index.asciidoc
@@ -47,4 +47,3 @@ include::authorization/kibana-privileges.asciidoc[]
include::api-keys/index.asciidoc[]
include::encryption-keys/index.asciidoc[]
include::role-mappings/index.asciidoc[]
-include::rbac_tutorial.asciidoc[]
diff --git a/docs/user/security/rbac_tutorial.asciidoc b/docs/user/security/rbac_tutorial.asciidoc
deleted file mode 100644
index 6324539c3c10a..0000000000000
--- a/docs/user/security/rbac_tutorial.asciidoc
+++ /dev/null
@@ -1,105 +0,0 @@
-[[space-rbac-tutorial]]
-=== Tutorial: Use role-based access control to customize Kibana spaces
-
-With role-based access control (RBAC), you can provide users access to data, tools,
-and Kibana spaces. In this tutorial, you will learn how to configure roles
-that provide the right users with the right access to the data, tools, and
-Kibana spaces.
-
-[float]
-==== Scenario
-
-Our user is a web developer working on a bank's
-online mortgage service. The web developer has these
-three requirements:
-
-* Have access to the data for that service
-* Build visualizations and dashboards
-* Monitor the performance of the system
-
-You'll provide the web developer with the access and privileges to get the job done.
-
-[float]
-==== Prerequisites
-
-To complete this tutorial, you'll need the following:
-
-* **Administrative privileges**: You must have a role that grants privileges to create a space, role, and user. This is any role which grants the `manage_security` cluster privilege. By default, the `superuser` role provides this access. See the {ref}/built-in-roles.html[built-in] roles.
-* **A space**: In this tutorial, use `Dev Mortgage` as the space
-name. See <> for
-details on creating a space.
-* **Data**: You can use <> or
-live data. In the following steps, Filebeat and Metricbeat data are used.
-
-[float]
-==== Steps
-
-With the requirements in mind, here are the steps that you will work
-through in this tutorial:
-
-* Create a role named `mortgage-developer`
-* Give the role permission to access the data in the relevant indices
-* Give the role permission to create visualizations and dashboards
-* Create the web developer's user account with the proper roles
-
-[float]
-==== Create a role
-
-Open the main menu, then click *Stack Management > Roles*
-for an overview of your roles. This view provides actions
-for you to create, edit, and delete roles.
-
-[role="screenshot"]
-image::security/images/role-management.png["Role management"]
-
-
-You can create as many roles as you like. Click *Create role* and
-provide a name. Use `dev-mortgage` because this role is for a developer
-working on the bank's mortgage application.
-
-
-[float]
-==== Give the role permission to access the data
-
-Access to data in indices is an index-level privilege, so in
-*Index privileges*, add lines for the indices that contain the
-data for this role. Two privileges are required: `read` and
-`view_index_metadata`. All privileges are detailed in the
-https://www.elastic.co/guide/en/elasticsearch/reference/current/security-privileges.html[security privileges] documentation.
-
-In the screenshots, Filebeat and Metricbeat data is used, but you
-should use the index patterns for your indices.
-
-[role="screenshot"]
-image::security/images/role-index-privilege.png["Index privilege"]
-
-[float]
-==== Give the role permissions to {kib} apps
-
-To enable users to create dashboards, visualizations, and saved searches, add {kib} privileges to the `dev-mortgage` role.
-
-. On the *{kib} privileges* window, select *Dev Mortgage* from the *Space* dropdown.
-
-. Click **Add space privilege**.
-
-. For *Dashboard*, *Visualize Library*, and *Discover*, click *All*.
-+
-It is common to create saved searches in *Discover* while creating visualizations.
-+
-[role="screenshot"]
-image::security/images/role-space-visualization.png["Associate space"]
-
-[float]
-==== Create the developer user account with the proper roles
-
-. Open the main menu, then click *Stack Management > Users*.
-. Click **Create user**, then give the user the `dev-mortgage`
-and `monitoring-user` roles, which are required for *Stack Monitoring* users.
-
-[role="screenshot"]
-image::security/images/role-new-user.png["Developer user"]
-
-Finally, have the developer log in and access the Dev Mortgage space
-and create a new visualization.
-
-NOTE: If the user is assigned to only one space, they will automatically enter that space on login.
diff --git a/docs/user/security/tutorials/how-to-secure-access-to-kibana.asciidoc b/docs/user/security/tutorials/how-to-secure-access-to-kibana.asciidoc
new file mode 100644
index 0000000000000..63b83712e3e6e
--- /dev/null
+++ b/docs/user/security/tutorials/how-to-secure-access-to-kibana.asciidoc
@@ -0,0 +1,136 @@
+[[tutorial-secure-access-to-kibana]]
+== Securing access to {kib}
+
+
+{kib} is home to an ever-growing suite of powerful features, which help you get the most out of your data. Your data is important, and should be protected. {kib} allows you to secure access to your data and control how users are able to interact with your data.
+
+For example, some users might only need to view your stunning dashboards, while others might need to manage your fleet of Elastic agents and run machine learning jobs to detect anomalous behavior in your network.
+
+This guide introduces you to three of {kib}'s security features: spaces, roles, and users. By the end of this tutorial, you will learn how to manage these entities, and how you can leverage them to secure access to both {kib} and your data.
+
+[float]
+=== Spaces
+
+Do you have multiple teams using {kib}? Do you want a “playground” to experiment with new visualizations or alerts? If so, then <> can help.
+
+Think of a space as another instance of {kib}. A space allows you to organize your <>, <>, <>, and much more into their own categories. For example, you might have a Marketing space for your marketeers to track the results of their campaigns, and an Engineering space for your developers to {apm-get-started-ref}/overview.html[monitor application performance].
+
+The assets you create in one space are isolated from other spaces, so when you enter a space, you only see the assets that belong to that space.
+
+Refer to the <> for more information.
+
+[float]
+=== Roles
+
+Once your spaces are set up, the next step to securing access is to provision your roles. Roles are a collection of privileges that allow you to perform actions in {kib} and Elasticsearch. Roles are assigned to users, and to {ref}/built-in-users.html[system accounts] that power the Elastic Stack.
+
+You can create your own roles, or use any of the {ref}/built-in-roles.html[built-in roles]. Some built-in roles are intended for Elastic Stack components and should not be assigned to end users directly.
+
+One of the more useful built-in roles is `kibana_admin`. Assigning this role to your users will grant access to all of {kib}'s features. This includes the ability to manage Spaces.
+
+The built-in roles are great for getting started with the Elastic Stack, and for system administrators who do not need more restrictive access. With so many features, it’s not possible to ship more granular roles to accommodate everyone’s needs. This is where custom roles come in.
+
+As an administrator, you have the ability to create your own roles to describe exactly the kind of access your users should have. For example, you might create a `marketing_user` role, which you then assign to all users in your marketing department. This role would grant access to all of the necessary data and features for this team to be successful, without granting them access they don’t require.
+
+
+[float]
+=== Users
+
+Once your roles are set up, the next step to securing access is to create your users, and assign them one or more roles. {kib}'s user management allows you to provision accounts for each of your users.
+
+TIP: Want Single Sign-on? {kib} supports a wide range of SSO implementations, including SAML, OIDC, LDAP/AD, and Kerberos. <>.
+
+
+[float]
+[[tutorial-secure-kibana-dashboards-only]]
+=== Example: Create a user with access only to dashboards
+
+Let’s work through an example together. Consider a marketing analyst who wants to monitor the effectiveness of their campaigns. They should be able to see their team’s dashboards, but not be allowed to view or manage anything else in {kib}. All of the team’s dashboards are located in the Marketing space.
+
+[float]
+==== Create a space
+
+Create a Marketing space for your marketing analysts to use.
+
+. Open the main menu, and select **Stack Management**.
+. Under **{kib}**, select **Spaces**.
+. Click **Create a space**.
+. Give this space a unique name. For example: `Marketing`.
+. Click **Create space**.
++
+If you’ve followed the example above, you should end up with a space that looks like this:
++
+[role="screenshot"]
+image::user/security/images/tutorial-secure-access-example-1-space.png[Create space UI]
+
+
+[float]
+==== Create a role
+
+To effectively use dashboards, create a role that describes the privileges you want to grant.
+In this example, a marketing analyst will need:
+
+* Access to **read** the data that powers the dashboards
+* Access to **read** the dashboards within the `Marketing` space
+
+To create the role:
+
+. Open the main menu, and select **Stack Management**.
+. Under **Security**, select **Roles**.
+. Click **Create role**.
+. Give this role a unique name. For example: `marketing_dashboards_role`.
+. For this example, you want to store all marketing data in the `acme-marketing-*` set of indices. To grant this access, locate the **Index privileges** section and enter:
+.. `acme-marketing-*` in the **Indices** field.
+.. `read` and `view_index_metadata` in the **Privileges** field.
++
+TIP: You can add multiple patterns of indices, and grant different access levels to each. Click **Add index privilege** to grant additional access.
+. To grant access to dashboards in the `Marketing` space, locate the {kib} section, and click **Add {kib} privilege**:
+.. From the **Spaces** dropdown, select the `Marketing` space.
+.. Expand the **Analytics** section, and select the **Read** privilege for **Dashboard**.
+.. Click **Add Kibana privilege**.
+. Click **Create role**.
++
+If you’ve followed the example above, you should end up with a role that looks like this:
++
+[role="screenshot"]
+image::user/security/images/tutorial-secure-access-example-1-role.png[Create role UI]
+
+
+[float]
+==== Create a user
+
+Now that you created a role, create a user account.
+
+. Navigate to *Stack Management*, and under *Security*, select *Users*.
+. Click *Create user*.
+. Give this user a descriptive username, and choose a secure password.
+. Assign the *marketing_dashboards_role* that you previously created to this new user.
+. Click *Create user*.
+
+[role="screenshot"]
+image::user/security/images/tutorial-secure-access-example-1-user.png[Create user UI]
+
+[float]
+==== Verify
+
+Verify that the user and role are working correctly.
+
+. Log out of {kib} if you are already logged in.
+. In the login screen, enter the username and password for the account you created.
++
+You’re taken into the `Marketing` space, and the main navigation shows only the *Dashboard* application.
++
+[role="screenshot"]
+image::user/security/images/tutorial-secure-access-example-1-test.png[Verifying access to dashboards]
+
+
+[float]
+=== What's next?
+
+This guide is an introduction to {kib}'s security features. Check out these additional resources to learn more about authenticating and authorizing your users.
+
+* View the <> to learn more about single-sign on and other login features.
+
+* View the <> to learn more about authorizing access to {kib}'s features.
+
+Still have questions? Ask on our https://discuss.elastic.co/c/kibana[Kibana discuss forum] and a fellow community member or Elastic engineer will help out.
diff --git a/docs/user/setup.asciidoc b/docs/user/setup.asciidoc
index a38bf699c1db8..bea13c1ef49b2 100644
--- a/docs/user/setup.asciidoc
+++ b/docs/user/setup.asciidoc
@@ -54,6 +54,8 @@ include::{kib-repo-dir}/setup/start-stop.asciidoc[]
include::{kib-repo-dir}/setup/access.asciidoc[]
+include::security/tutorials/how-to-secure-access-to-kibana.asciidoc[]
+
include::{kib-repo-dir}/setup/connect-to-elasticsearch.asciidoc[]
include::{kib-repo-dir}/setup/upgrade.asciidoc[]
diff --git a/examples/search_examples/public/search/app.tsx b/examples/search_examples/public/search/app.tsx
index c87bf21e0e71c..3bac445581ae7 100644
--- a/examples/search_examples/public/search/app.tsx
+++ b/examples/search_examples/public/search/app.tsx
@@ -204,8 +204,8 @@ export const SearchExamplesApp = ({
});
}
- setRequest(await searchSource.getSearchRequestBody());
- const res = await searchSource.fetch();
+ setRequest(searchSource.getSearchRequestBody());
+ const res = await searchSource.fetch$().toPromise();
setResponse(res);
const message = Searched {res.hits.total} documents.;
diff --git a/package.json b/package.json
index a1acf73ea26f0..9bddca4665467 100644
--- a/package.json
+++ b/package.json
@@ -131,10 +131,12 @@
"@kbn/crypto": "link:packages/kbn-crypto",
"@kbn/i18n": "link:packages/kbn-i18n",
"@kbn/interpreter": "link:packages/kbn-interpreter",
+ "@kbn/io-ts-utils": "link:packages/kbn-io-ts-utils",
"@kbn/legacy-logging": "link:packages/kbn-legacy-logging",
"@kbn/logging": "link:packages/kbn-logging",
"@kbn/monaco": "link:packages/kbn-monaco",
"@kbn/server-http-tools": "link:packages/kbn-server-http-tools",
+ "@kbn/server-route-repository": "link:packages/kbn-server-route-repository",
"@kbn/std": "link:packages/kbn-std",
"@kbn/tinymath": "link:packages/kbn-tinymath",
"@kbn/ui-framework": "link:packages/kbn-ui-framework",
@@ -206,7 +208,6 @@
"content-disposition": "0.5.3",
"copy-to-clipboard": "^3.0.8",
"core-js": "^3.6.5",
- "custom-event-polyfill": "^0.3.0",
"cytoscape": "^3.10.0",
"cytoscape-dagre": "^2.2.2",
"d3": "3.5.17",
diff --git a/packages/kbn-analytics/tsconfig.json b/packages/kbn-analytics/tsconfig.json
index c2e579e7fdbea..80a2255d71805 100644
--- a/packages/kbn-analytics/tsconfig.json
+++ b/packages/kbn-analytics/tsconfig.json
@@ -7,6 +7,7 @@
"emitDeclarationOnly": true,
"declaration": true,
"declarationMap": true,
+ "isolatedModules": true,
"sourceMap": true,
"sourceRoot": "../../../../../packages/kbn-analytics/src",
"types": [
diff --git a/src/core/server/dev/dev_config.ts b/packages/kbn-io-ts-utils/jest.config.js
similarity index 56%
rename from src/core/server/dev/dev_config.ts
rename to packages/kbn-io-ts-utils/jest.config.js
index 2fec778d85713..1a71166fae843 100644
--- a/src/core/server/dev/dev_config.ts
+++ b/packages/kbn-io-ts-utils/jest.config.js
@@ -6,11 +6,8 @@
* Side Public License, v 1.
*/
-import { schema } from '@kbn/config-schema';
-
-export const config = {
- path: 'dev',
- // dev configuration is validated by the dev cli.
- // we only need to register the `dev` schema to avoid failing core's config validation
- schema: schema.object({}, { unknowns: 'ignore' }),
+module.exports = {
+ preset: '@kbn/test',
+ rootDir: '../..',
+ roots: ['/packages/kbn-io-ts-utils'],
};
diff --git a/packages/kbn-io-ts-utils/package.json b/packages/kbn-io-ts-utils/package.json
new file mode 100644
index 0000000000000..4d6f02d3f85a6
--- /dev/null
+++ b/packages/kbn-io-ts-utils/package.json
@@ -0,0 +1,13 @@
+{
+ "name": "@kbn/io-ts-utils",
+ "main": "./target/index.js",
+ "types": "./target/index.d.ts",
+ "version": "1.0.0",
+ "license": "SSPL-1.0 OR Elastic License 2.0",
+ "private": true,
+ "scripts": {
+ "build": "../../node_modules/.bin/tsc",
+ "kbn:bootstrap": "yarn build",
+ "kbn:watch": "yarn build --watch"
+ }
+}
diff --git a/packages/kbn-io-ts-utils/src/index.ts b/packages/kbn-io-ts-utils/src/index.ts
new file mode 100644
index 0000000000000..2032127b1eb91
--- /dev/null
+++ b/packages/kbn-io-ts-utils/src/index.ts
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export { jsonRt } from './json_rt';
+export { mergeRt } from './merge_rt';
+export { strictKeysRt } from './strict_keys_rt';
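A usage sketch based on the package's own tests: `jsonRt` parses a JSON-encoded string before validating it, and `mergeRt` deep-merges two codecs rather than intersecting them shallowly.

```typescript
import * as t from 'io-ts';
import { isRight } from 'fp-ts/lib/Either';
import { jsonRt, mergeRt } from '@kbn/io-ts-utils';

// Query parameters arrive as strings; jsonRt turns the string 'true' into a real boolean.
const queryRt = mergeRt(
  t.type({ from: t.string }),
  t.type({ verbose: jsonRt.pipe(t.boolean) })
);

const result = queryRt.decode({ from: 'now-15m', verbose: 'true' });
console.log(isRight(result)); // true; result.right is { from: 'now-15m', verbose: true }
```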
diff --git a/x-pack/plugins/apm/common/runtime_types/json_rt/index.test.ts b/packages/kbn-io-ts-utils/src/json_rt/index.test.ts
similarity index 85%
rename from x-pack/plugins/apm/common/runtime_types/json_rt/index.test.ts
rename to packages/kbn-io-ts-utils/src/json_rt/index.test.ts
index d6c286c672d90..1220639fc7bef 100644
--- a/x-pack/plugins/apm/common/runtime_types/json_rt/index.test.ts
+++ b/packages/kbn-io-ts-utils/src/json_rt/index.test.ts
@@ -1,8 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
*/
import * as t from 'io-ts';
@@ -12,9 +13,7 @@ import { Right } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
-function getValueOrThrow>(
- either: TEither
-): Right {
+function getValueOrThrow>(either: TEither): Right {
const value = pipe(
either,
fold(() => {
diff --git a/x-pack/plugins/apm/common/runtime_types/json_rt/index.ts b/packages/kbn-io-ts-utils/src/json_rt/index.ts
similarity index 74%
rename from x-pack/plugins/apm/common/runtime_types/json_rt/index.ts
rename to packages/kbn-io-ts-utils/src/json_rt/index.ts
index 0207145a17be7..bc596d53db54c 100644
--- a/x-pack/plugins/apm/common/runtime_types/json_rt/index.ts
+++ b/packages/kbn-io-ts-utils/src/json_rt/index.ts
@@ -1,8 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
*/
import * as t from 'io-ts';
diff --git a/x-pack/plugins/apm/common/runtime_types/merge/index.test.ts b/packages/kbn-io-ts-utils/src/merge_rt/index.test.ts
similarity index 66%
rename from x-pack/plugins/apm/common/runtime_types/merge/index.test.ts
rename to packages/kbn-io-ts-utils/src/merge_rt/index.test.ts
index af5a0221662d5..b25d4451895f2 100644
--- a/x-pack/plugins/apm/common/runtime_types/merge/index.test.ts
+++ b/packages/kbn-io-ts-utils/src/merge_rt/index.test.ts
@@ -1,18 +1,19 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
*/
import * as t from 'io-ts';
import { isLeft } from 'fp-ts/lib/Either';
-import { merge } from './';
+import { mergeRt } from '.';
import { jsonRt } from '../json_rt';
describe('merge', () => {
it('fails on one or more errors', () => {
- const type = merge([t.type({ foo: t.string }), t.type({ bar: t.number })]);
+ const type = mergeRt(t.type({ foo: t.string }), t.type({ bar: t.number }));
const result = type.decode({ foo: '' });
@@ -20,10 +21,7 @@ describe('merge', () => {
});
it('merges left to right', () => {
- const typeBoolean = merge([
- t.type({ foo: t.string }),
- t.type({ foo: jsonRt.pipe(t.boolean) }),
- ]);
+ const typeBoolean = mergeRt(t.type({ foo: t.string }), t.type({ foo: jsonRt.pipe(t.boolean) }));
const resultBoolean = typeBoolean.decode({
foo: 'true',
@@ -34,10 +32,7 @@ describe('merge', () => {
foo: true,
});
- const typeString = merge([
- t.type({ foo: jsonRt.pipe(t.boolean) }),
- t.type({ foo: t.string }),
- ]);
+ const typeString = mergeRt(t.type({ foo: jsonRt.pipe(t.boolean) }), t.type({ foo: t.string }));
const resultString = typeString.decode({
foo: 'true',
@@ -50,10 +45,10 @@ describe('merge', () => {
});
it('deeply merges values', () => {
- const type = merge([
+ const type = mergeRt(
t.type({ foo: t.type({ baz: t.string }) }),
- t.type({ foo: t.type({ bar: t.string }) }),
- ]);
+ t.type({ foo: t.type({ bar: t.string }) })
+ );
const result = type.decode({
foo: {
diff --git a/x-pack/plugins/apm/common/runtime_types/merge/index.ts b/packages/kbn-io-ts-utils/src/merge_rt/index.ts
similarity index 62%
rename from x-pack/plugins/apm/common/runtime_types/merge/index.ts
rename to packages/kbn-io-ts-utils/src/merge_rt/index.ts
index 451edf678aabe..c582767fb5101 100644
--- a/x-pack/plugins/apm/common/runtime_types/merge/index.ts
+++ b/packages/kbn-io-ts-utils/src/merge_rt/index.ts
@@ -1,31 +1,40 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
*/
import * as t from 'io-ts';
import { merge as lodashMerge } from 'lodash';
import { isLeft } from 'fp-ts/lib/Either';
-import { ValuesType } from 'utility-types';
-export type MergeType<
- T extends t.Any[],
- U extends ValuesType = ValuesType
-> = t.Type & {
- _tag: 'MergeType';
- types: T;
-};
+type PlainObject = Record;
+
+type DeepMerge = U extends PlainObject
+ ? T extends PlainObject
+ ? Omit &
+ {
+ [key in keyof U]: T extends { [k in key]: any } ? DeepMerge : U[key];
+ }
+ : U
+ : U;
// this is similar to t.intersection, but does a deep merge
// instead of a shallow merge
-export function merge(
- types: [A, B]
-): MergeType<[A, B]>;
+export type MergeType = t.Type<
+ DeepMerge, t.TypeOf>,
+ DeepMerge, t.OutputOf>
+> & {
+ _tag: 'MergeType';
+ types: [T1, T2];
+};
+
+export function mergeRt(a: T1, b: T2): MergeType;
-export function merge(types: t.Any[]) {
+export function mergeRt(...types: t.Any[]) {
const mergeType = new t.Type(
'merge',
(u): u is unknown => {
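As the comment above notes, `mergeRt` behaves like `t.intersection` but deep-merges the decoded values. A short usage sketch, mirroring the "deeply merges values" test in this PR and assuming the `@kbn/io-ts-utils` export added above:

```ts
import * as t from 'io-ts';
import { isRight } from 'fp-ts/lib/Either';
import { mergeRt } from '@kbn/io-ts-utils';

// Two codecs that each describe part of the same nested object.
const codec = mergeRt(
  t.type({ foo: t.type({ baz: t.string }) }),
  t.type({ foo: t.type({ bar: t.string }) })
);

const result = codec.decode({ foo: { baz: 'baz', bar: 'bar' } });

if (isRight(result)) {
  // Unlike a shallow intersection, the decoded value (and its static type)
  // contains both foo.baz and foo.bar.
  console.log(result.right.foo);
}
```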
diff --git a/x-pack/plugins/apm/common/runtime_types/strict_keys_rt/index.test.ts b/packages/kbn-io-ts-utils/src/strict_keys_rt/index.test.ts
similarity index 77%
rename from x-pack/plugins/apm/common/runtime_types/strict_keys_rt/index.test.ts
rename to packages/kbn-io-ts-utils/src/strict_keys_rt/index.test.ts
index 4212e0430ff5f..ab20ca42a283e 100644
--- a/x-pack/plugins/apm/common/runtime_types/strict_keys_rt/index.test.ts
+++ b/packages/kbn-io-ts-utils/src/strict_keys_rt/index.test.ts
@@ -1,8 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
*/
import * as t from 'io-ts';
@@ -14,10 +15,7 @@ describe('strictKeysRt', () => {
it('correctly and deeply validates object keys', () => {
const checks: Array<{ type: t.Type; passes: any[]; fails: any[] }> = [
{
- type: t.intersection([
- t.type({ foo: t.string }),
- t.partial({ bar: t.string }),
- ]),
+ type: t.intersection([t.type({ foo: t.string }), t.partial({ bar: t.string })]),
passes: [{ foo: '' }, { foo: '', bar: '' }],
fails: [
{ foo: '', unknownKey: '' },
@@ -26,15 +24,9 @@ describe('strictKeysRt', () => {
},
{
type: t.type({
- path: t.union([
- t.type({ serviceName: t.string }),
- t.type({ transactionType: t.string }),
- ]),
+ path: t.union([t.type({ serviceName: t.string }), t.type({ transactionType: t.string })]),
}),
- passes: [
- { path: { serviceName: '' } },
- { path: { transactionType: '' } },
- ],
+ passes: [{ path: { serviceName: '' } }, { path: { transactionType: '' } }],
fails: [
{ path: { serviceName: '', unknownKey: '' } },
{ path: { transactionType: '', unknownKey: '' } },
@@ -62,9 +54,7 @@ describe('strictKeysRt', () => {
if (!isRight(result)) {
throw new Error(
- `Expected ${JSON.stringify(
- value
- )} to be allowed, but validation failed with ${
+ `Expected ${JSON.stringify(value)} to be allowed, but validation failed with ${
result.left[0].message
}`
);
@@ -76,9 +66,7 @@ describe('strictKeysRt', () => {
if (!isLeft(result)) {
throw new Error(
- `Expected ${JSON.stringify(
- value
- )} to be disallowed, but validation succeeded`
+ `Expected ${JSON.stringify(value)} to be disallowed, but validation succeeded`
);
}
});
diff --git a/x-pack/plugins/apm/common/runtime_types/strict_keys_rt/index.ts b/packages/kbn-io-ts-utils/src/strict_keys_rt/index.ts
similarity index 66%
rename from x-pack/plugins/apm/common/runtime_types/strict_keys_rt/index.ts
rename to packages/kbn-io-ts-utils/src/strict_keys_rt/index.ts
index e90ccf7eb8d31..56afdf54463f7 100644
--- a/x-pack/plugins/apm/common/runtime_types/strict_keys_rt/index.ts
+++ b/packages/kbn-io-ts-utils/src/strict_keys_rt/index.ts
@@ -1,14 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
*/
import * as t from 'io-ts';
import { either, isRight } from 'fp-ts/lib/Either';
import { mapValues, difference, isPlainObject, forEach } from 'lodash';
-import { MergeType, merge } from '../merge';
+import { MergeType, mergeRt } from '../merge_rt';
/*
Type that tracks validated keys, and fails when the input value
@@ -21,7 +22,7 @@ type ParsableType =
| t.PartialType
| t.ExactType
| t.InterfaceType
- | MergeType;
+ | MergeType;
function getKeysInObject>(
object: T,
@@ -32,17 +33,16 @@ function getKeysInObject>(
const ownPrefix = prefix ? `${prefix}.${key}` : key;
keys.push(ownPrefix);
if (isPlainObject(object[key])) {
- keys.push(
- ...getKeysInObject(object[key] as Record, ownPrefix)
- );
+ keys.push(...getKeysInObject(object[key] as Record, ownPrefix));
}
});
return keys;
}
-function addToContextWhenValidated<
- T extends t.InterfaceType | t.PartialType
->(type: T, prefix: string): T {
+function addToContextWhenValidated | t.PartialType>(
+ type: T,
+ prefix: string
+): T {
const validate = (input: unknown, context: t.Context) => {
const result = type.validate(input, context);
const keysType = context[0].type as StrictKeysType;
@@ -50,36 +50,19 @@ function addToContextWhenValidated<
throw new Error('Expected a top-level StrictKeysType');
}
if (isRight(result)) {
- keysType.trackedKeys.push(
- ...Object.keys(type.props).map((propKey) => `${prefix}${propKey}`)
- );
+ keysType.trackedKeys.push(...Object.keys(type.props).map((propKey) => `${prefix}${propKey}`));
}
return result;
};
if (type._tag === 'InterfaceType') {
- return new t.InterfaceType(
- type.name,
- type.is,
- validate,
- type.encode,
- type.props
- ) as T;
+ return new t.InterfaceType(type.name, type.is, validate, type.encode, type.props) as T;
}
- return new t.PartialType(
- type.name,
- type.is,
- validate,
- type.encode,
- type.props
- ) as T;
+ return new t.PartialType(type.name, type.is, validate, type.encode, type.props) as T;
}
-function trackKeysOfValidatedTypes(
- type: ParsableType | t.Any,
- prefix: string = ''
-): t.Any {
+function trackKeysOfValidatedTypes(type: ParsableType | t.Any, prefix: string = ''): t.Any {
if (!('_tag' in type)) {
return type;
}
@@ -89,27 +72,24 @@ function trackKeysOfValidatedTypes(
case 'IntersectionType': {
const collectionType = type as t.IntersectionType;
return t.intersection(
- collectionType.types.map((rt) =>
- trackKeysOfValidatedTypes(rt, prefix)
- ) as [t.Any, t.Any]
+ collectionType.types.map((rt) => trackKeysOfValidatedTypes(rt, prefix)) as [t.Any, t.Any]
);
}
case 'UnionType': {
const collectionType = type as t.UnionType;
return t.union(
- collectionType.types.map((rt) =>
- trackKeysOfValidatedTypes(rt, prefix)
- ) as [t.Any, t.Any]
+ collectionType.types.map((rt) => trackKeysOfValidatedTypes(rt, prefix)) as [t.Any, t.Any]
);
}
case 'MergeType': {
- const collectionType = type as MergeType;
- return merge(
- collectionType.types.map((rt) =>
- trackKeysOfValidatedTypes(rt, prefix)
- ) as [t.Any, t.Any]
+ const collectionType = type as MergeType;
+ return mergeRt(
+ ...(collectionType.types.map((rt) => trackKeysOfValidatedTypes(rt, prefix)) as [
+ t.Any,
+ t.Any
+ ])
);
}
@@ -142,9 +122,7 @@ function trackKeysOfValidatedTypes(
case 'ExactType': {
const exactType = type as t.ExactType;
- return t.exact(
- trackKeysOfValidatedTypes(exactType.type, prefix) as t.HasProps
- );
+ return t.exact(trackKeysOfValidatedTypes(exactType.type, prefix) as t.HasProps);
}
default:
@@ -169,17 +147,11 @@ class StrictKeysType<
(input, context) => {
this.trackedKeys.length = 0;
return either.chain(trackedType.validate(input, context), (i) => {
- const originalKeys = getKeysInObject(
- input as Record
- );
+ const originalKeys = getKeysInObject(input as Record);
const excessKeys = difference(originalKeys, this.trackedKeys);
if (excessKeys.length) {
- return t.failure(
- i,
- context,
- `Excess keys are not allowed: \n${excessKeys.join('\n')}`
- );
+ return t.failure(i, context, `Excess keys are not allowed: \n${excessKeys.join('\n')}`);
}
return t.success(i);
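`strictKeysRt` wraps a codec, tracks every key that the wrapped codecs validate, and fails when the input contains keys that were never validated. A small usage sketch, based on the tests in this PR and the `@kbn/io-ts-utils` export added above:

```ts
import * as t from 'io-ts';
import { isLeft } from 'fp-ts/lib/Either';
import { strictKeysRt } from '@kbn/io-ts-utils';

const paramsRt = strictKeysRt(
  t.type({
    path: t.type({ serviceName: t.string }),
  })
);

// Keys that are not declared anywhere in the wrapped codec are rejected,
// e.g. this fails with "Excess keys are not allowed: path.unknownKey".
const result = paramsRt.decode({
  path: { serviceName: 'opbeans-java', unknownKey: 'nope' },
});

console.log(isLeft(result)); // true
```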
diff --git a/packages/kbn-io-ts-utils/tsconfig.json b/packages/kbn-io-ts-utils/tsconfig.json
new file mode 100644
index 0000000000000..6c67518e21073
--- /dev/null
+++ b/packages/kbn-io-ts-utils/tsconfig.json
@@ -0,0 +1,19 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "compilerOptions": {
+ "incremental": false,
+ "outDir": "./target",
+ "stripInternal": false,
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-io-ts-utils/src",
+ "types": [
+ "jest",
+ "node"
+ ]
+ },
+ "include": [
+ "./src/**/*.ts"
+ ]
+}
diff --git a/packages/kbn-server-route-repository/README.md b/packages/kbn-server-route-repository/README.md
new file mode 100644
index 0000000000000..e22205540ef31
--- /dev/null
+++ b/packages/kbn-server-route-repository/README.md
@@ -0,0 +1,7 @@
+# @kbn/server-route-repository
+
+Utility functions for creating a typed server route repository and a typed client, generating both runtime validation and static types from the same route definition.
+
+## Usage
+
+TBD
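Since the Usage section is still marked TBD, here is a rough sketch of how the pieces added in this PR appear to fit together. The endpoint, resource shape, and handler below are hypothetical placeholders; only the package exports (`createServerRouteFactory`, `createServerRouteRepository`) come from the code in this PR.

```ts
import * as t from 'io-ts';
import {
  createServerRouteFactory,
  createServerRouteRepository,
} from '@kbn/server-route-repository';

// Hypothetical resources a plugin would hand to every route handler.
interface ExampleRouteResources {
  context: { getSpaceId: () => string };
}

// The factory is an identity function at runtime; it only pins the resource
// and create-option generics once so each route does not have to repeat them.
const createExampleRoute = createServerRouteFactory<ExampleRouteResources, {}>();

const getServiceRoute = createExampleRoute({
  endpoint: 'GET /internal/example/services/{serviceName}',
  params: t.type({
    path: t.type({ serviceName: t.string }),
  }),
  handler: async ({ params, context }) => {
    // `params` is typed from the codec above; `context` comes from the resources.
    return { serviceName: params.path.serviceName, spaceId: context.getSpaceId() };
  },
});

const repository = createServerRouteRepository<ExampleRouteResources, {}>().add(getServiceRoute);

// The repository type now carries every endpoint, so both the server and a
// typed client can derive parameter and return types from the same definition.
export type ExampleRepository = typeof repository;
```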
diff --git a/packages/kbn-server-route-repository/jest.config.js b/packages/kbn-server-route-repository/jest.config.js
new file mode 100644
index 0000000000000..7449bb7cd3860
--- /dev/null
+++ b/packages/kbn-server-route-repository/jest.config.js
@@ -0,0 +1,13 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+module.exports = {
+ preset: '@kbn/test',
+ rootDir: '../..',
+  roots: ['<rootDir>/packages/kbn-server-route-repository'],
+};
diff --git a/packages/kbn-server-route-repository/package.json b/packages/kbn-server-route-repository/package.json
new file mode 100644
index 0000000000000..ce1ca02d0c4f6
--- /dev/null
+++ b/packages/kbn-server-route-repository/package.json
@@ -0,0 +1,16 @@
+{
+ "name": "@kbn/server-route-repository",
+ "main": "./target/index.js",
+ "types": "./target/index.d.ts",
+ "version": "1.0.0",
+ "license": "SSPL-1.0 OR Elastic License 2.0",
+ "private": true,
+ "scripts": {
+ "build": "../../node_modules/.bin/tsc",
+ "kbn:bootstrap": "yarn build",
+ "kbn:watch": "yarn build --watch"
+ },
+ "dependencies": {
+ "@kbn/io-ts-utils": "link:../kbn-io-ts-utils"
+ }
+}
diff --git a/packages/kbn-server-route-repository/src/create_server_route_factory.ts b/packages/kbn-server-route-repository/src/create_server_route_factory.ts
new file mode 100644
index 0000000000000..edf9bd657f995
--- /dev/null
+++ b/packages/kbn-server-route-repository/src/create_server_route_factory.ts
@@ -0,0 +1,38 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+import {
+ ServerRouteCreateOptions,
+ ServerRouteHandlerResources,
+ RouteParamsRT,
+ ServerRoute,
+} from './typings';
+
+export function createServerRouteFactory<
+ TRouteHandlerResources extends ServerRouteHandlerResources,
+ TRouteCreateOptions extends ServerRouteCreateOptions
+>(): <
+ TEndpoint extends string,
+ TReturnType,
+ TRouteParamsRT extends RouteParamsRT | undefined = undefined
+>(
+ route: ServerRoute<
+ TEndpoint,
+ TRouteParamsRT,
+ TRouteHandlerResources,
+ TReturnType,
+ TRouteCreateOptions
+ >
+) => ServerRoute<
+ TEndpoint,
+ TRouteParamsRT,
+ TRouteHandlerResources,
+ TReturnType,
+ TRouteCreateOptions
+> {
+ return (route) => route;
+}
diff --git a/packages/kbn-server-route-repository/src/create_server_route_repository.ts b/packages/kbn-server-route-repository/src/create_server_route_repository.ts
new file mode 100644
index 0000000000000..5ac89ebcac77f
--- /dev/null
+++ b/packages/kbn-server-route-repository/src/create_server_route_repository.ts
@@ -0,0 +1,39 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+import {
+ ServerRouteHandlerResources,
+ ServerRouteRepository,
+ ServerRouteCreateOptions,
+} from './typings';
+
+export function createServerRouteRepository<
+ TRouteHandlerResources extends ServerRouteHandlerResources = never,
+ TRouteCreateOptions extends ServerRouteCreateOptions = never
+>(): ServerRouteRepository {
+ let routes: Record = {};
+
+ return {
+ add(route) {
+ routes = {
+ ...routes,
+ [route.endpoint]: route,
+ };
+
+ return this as any;
+ },
+ merge(repository) {
+ routes = {
+ ...routes,
+ ...Object.fromEntries(repository.getRoutes().map((route) => [route.endpoint, route])),
+ };
+
+ return this as any;
+ },
+ getRoutes: () => Object.values(routes),
+ };
+}
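The repository is a chainable builder: `add` registers a route keyed by its endpoint and `merge` folds in the routes of another repository, both returning the repository with a widened type. A small sketch with placeholder endpoints:

```ts
import { createServerRouteRepository } from '@kbn/server-route-repository';

const serviceRoutes = createServerRouteRepository<{}, {}>().add({
  endpoint: 'GET /internal/example/services',
  handler: async () => ({ services: [] as string[] }),
});

const alertRoutes = createServerRouteRepository<{}, {}>().add({
  endpoint: 'GET /internal/example/alerts',
  handler: async () => ({ alerts: [] as string[] }),
});

// getRoutes() is what a plugin would iterate over when wiring the routes
// into the Kibana HTTP router.
const repository = serviceRoutes.merge(alertRoutes);

for (const route of repository.getRoutes()) {
  console.log(route.endpoint);
}
```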
diff --git a/packages/kbn-server-route-repository/src/decode_request_params.test.ts b/packages/kbn-server-route-repository/src/decode_request_params.test.ts
new file mode 100644
index 0000000000000..08ef303ad0b3a
--- /dev/null
+++ b/packages/kbn-server-route-repository/src/decode_request_params.test.ts
@@ -0,0 +1,122 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+import { jsonRt } from '@kbn/io-ts-utils';
+import * as t from 'io-ts';
+import { decodeRequestParams } from './decode_request_params';
+
+describe('decodeRequestParams', () => {
+ it('decodes request params', () => {
+ const decode = () => {
+ return decodeRequestParams(
+ {
+ params: {
+ serviceName: 'opbeans-java',
+ },
+ body: null,
+ query: {
+ start: '',
+ },
+ },
+ t.type({
+ path: t.type({
+ serviceName: t.string,
+ }),
+ query: t.type({
+ start: t.string,
+ }),
+ })
+ );
+ };
+ expect(decode).not.toThrow();
+
+ expect(decode()).toEqual({
+ path: {
+ serviceName: 'opbeans-java',
+ },
+ query: {
+ start: '',
+ },
+ });
+ });
+
+ it('fails on excess keys', () => {
+ const decode = () => {
+ return decodeRequestParams(
+ {
+ params: {
+ serviceName: 'opbeans-java',
+ extraKey: '',
+ },
+ body: null,
+ query: {
+ start: '',
+ },
+ },
+ t.type({
+ path: t.type({
+ serviceName: t.string,
+ }),
+ query: t.type({
+ start: t.string,
+ }),
+ })
+ );
+ };
+
+ expect(decode).toThrowErrorMatchingInlineSnapshot(`
+ "Excess keys are not allowed:
+ path.extraKey"
+ `);
+ });
+
+ it('returns the decoded output', () => {
+ const decode = () => {
+ return decodeRequestParams(
+ {
+ params: {},
+ query: {
+ _inspect: 'true',
+ },
+ body: null,
+ },
+ t.type({
+ query: t.type({
+ _inspect: jsonRt.pipe(t.boolean),
+ }),
+ })
+ );
+ };
+
+ expect(decode).not.toThrow();
+
+ expect(decode()).toEqual({
+ query: {
+ _inspect: true,
+ },
+ });
+ });
+
+ it('strips empty params', () => {
+ const decode = () => {
+ return decodeRequestParams(
+ {
+ params: {},
+ query: {},
+ body: {},
+ },
+ t.type({
+ body: t.any,
+ })
+ );
+ };
+
+ expect(decode).not.toThrow();
+
+ expect(decode()).toEqual({});
+ });
+});
diff --git a/packages/kbn-server-route-repository/src/decode_request_params.ts b/packages/kbn-server-route-repository/src/decode_request_params.ts
new file mode 100644
index 0000000000000..00492d69b8ac5
--- /dev/null
+++ b/packages/kbn-server-route-repository/src/decode_request_params.ts
@@ -0,0 +1,43 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+import * as t from 'io-ts';
+import { omitBy, isPlainObject, isEmpty } from 'lodash';
+import { isLeft } from 'fp-ts/lib/Either';
+import { PathReporter } from 'io-ts/lib/PathReporter';
+import Boom from '@hapi/boom';
+import { strictKeysRt } from '@kbn/io-ts-utils';
+import { RouteParamsRT } from './typings';
+
+interface KibanaRequestParams {
+ body: unknown;
+ query: unknown;
+ params: unknown;
+}
+
+export function decodeRequestParams(
+ params: KibanaRequestParams,
+ paramsRt: T
+): t.OutputOf {
+ const paramMap = omitBy(
+ {
+ path: params.params,
+ body: params.body,
+ query: params.query,
+ },
+ (val) => val === null || val === undefined || (isPlainObject(val) && isEmpty(val))
+ );
+
+ // decode = validate
+ const result = strictKeysRt(paramsRt).decode(paramMap);
+
+ if (isLeft(result)) {
+ throw Boom.badRequest(PathReporter.report(result)[0]);
+ }
+
+ return result.right;
+}
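`decodeRequestParams` groups the raw request pieces under `path`, `query`, and `body`, drops the empty ones, validates the result with `strictKeysRt`, and throws a Boom bad-request error when decoding fails. A short sketch of a call, using a hand-written request object rather than a real `KibanaRequest`:

```ts
import * as t from 'io-ts';
import { decodeRequestParams } from '@kbn/server-route-repository';

const paramsRt = t.type({
  path: t.type({ serviceName: t.string }),
  query: t.type({ start: t.string }),
});

// In a real route these values would come from request.params / query / body.
const decoded = decodeRequestParams(
  {
    params: { serviceName: 'opbeans-java' },
    query: { start: 'now-15m' },
    body: null,
  },
  paramsRt
);

// Fully typed on success; an unknown key anywhere in the input would have
// thrown a 400 (Boom.badRequest) instead.
console.log(decoded.path.serviceName, decoded.query.start);
```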
diff --git a/packages/kbn-server-route-repository/src/format_request.ts b/packages/kbn-server-route-repository/src/format_request.ts
new file mode 100644
index 0000000000000..49004a78ce0e0
--- /dev/null
+++ b/packages/kbn-server-route-repository/src/format_request.ts
@@ -0,0 +1,20 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { parseEndpoint } from './parse_endpoint';
+
+export function formatRequest(endpoint: string, pathParams: Record = {}) {
+ const { method, pathname: rawPathname } = parseEndpoint(endpoint);
+
+ // replace template variables with path params
+ const pathname = Object.keys(pathParams).reduce((acc, paramName) => {
+ return acc.replace(`{${paramName}}`, pathParams[paramName]);
+ }, rawPathname);
+
+ return { method, pathname };
+}
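`formatRequest` is the client-side counterpart of `parseEndpoint`: it splits the endpoint string into a lower-cased method and a pathname, then substitutes `{param}` template segments with the supplied path params. For example (the endpoint string here is a made-up illustration):

```ts
import { formatRequest } from '@kbn/server-route-repository';

const { method, pathname } = formatRequest('GET /internal/example/services/{serviceName}', {
  serviceName: 'opbeans-java',
});

// method === 'get', pathname === '/internal/example/services/opbeans-java'
console.log(method, pathname);
```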
diff --git a/packages/kbn-server-route-repository/src/index.ts b/packages/kbn-server-route-repository/src/index.ts
new file mode 100644
index 0000000000000..23621c5b213bc
--- /dev/null
+++ b/packages/kbn-server-route-repository/src/index.ts
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export { createServerRouteRepository } from './create_server_route_repository';
+export { createServerRouteFactory } from './create_server_route_factory';
+export { formatRequest } from './format_request';
+export { parseEndpoint } from './parse_endpoint';
+export { decodeRequestParams } from './decode_request_params';
+export { routeValidationObject } from './route_validation_object';
+export {
+ RouteRepositoryClient,
+ ReturnOf,
+ EndpointOf,
+ ClientRequestParamsOf,
+ DecodedRequestParamsOf,
+ ServerRouteRepository,
+ ServerRoute,
+ RouteParamsRT,
+} from './typings';
diff --git a/packages/kbn-server-route-repository/src/parse_endpoint.ts b/packages/kbn-server-route-repository/src/parse_endpoint.ts
new file mode 100644
index 0000000000000..fd40489b0f4a5
--- /dev/null
+++ b/packages/kbn-server-route-repository/src/parse_endpoint.ts
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+type Method = 'get' | 'post' | 'put' | 'delete';
+
+export function parseEndpoint(endpoint: string) {
+ const parts = endpoint.split(' ');
+
+ const method = parts[0].trim().toLowerCase() as Method;
+ const pathname = parts[1].trim();
+
+ if (!['get', 'post', 'put', 'delete'].includes(method)) {
+ throw new Error('Endpoint was not prefixed with a valid HTTP method');
+ }
+
+ return { method, pathname };
+}
diff --git a/packages/kbn-server-route-repository/src/route_validation_object.ts b/packages/kbn-server-route-repository/src/route_validation_object.ts
new file mode 100644
index 0000000000000..550be8d20d446
--- /dev/null
+++ b/packages/kbn-server-route-repository/src/route_validation_object.ts
@@ -0,0 +1,20 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+import { schema } from '@kbn/config-schema';
+
+const anyObject = schema.object({}, { unknowns: 'allow' });
+
+export const routeValidationObject = {
+ // `body` can be null, but `validate` expects non-nullable types
+ // if any validation is defined. Not having validation currently
+ // means we don't get the payload. See
+ // https://github.com/elastic/kibana/issues/50179
+ body: schema.nullable(anyObject),
+ params: anyObject,
+ query: anyObject,
+};
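`routeValidationObject` effectively disables `@kbn/config-schema` validation at the platform level (while still receiving the payload), so that validation can instead be done with io-ts via `decodeRequestParams` inside the handler. The sketch below shows how this might be wired into a core HTTP route; the route path and handler body are placeholders, and the `IRouter` import assumes the usual plugin-facing type from `kibana/server`.

```ts
import * as t from 'io-ts';
import type { IRouter } from 'kibana/server';
import { routeValidationObject, decodeRequestParams } from '@kbn/server-route-repository';

export function registerExampleRoute(router: IRouter) {
  router.get(
    {
      path: '/internal/example/services/{serviceName}',
      // Accept anything at the platform level; io-ts does the real validation below.
      validate: routeValidationObject,
    },
    async (context, request, response) => {
      const params = decodeRequestParams(
        { params: request.params, query: request.query, body: request.body },
        t.type({ path: t.type({ serviceName: t.string }) })
      );

      return response.ok({ body: { serviceName: params.path.serviceName } });
    }
  );
}
```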
diff --git a/packages/kbn-server-route-repository/src/test_types.ts b/packages/kbn-server-route-repository/src/test_types.ts
new file mode 100644
index 0000000000000..c9015e19b82f8
--- /dev/null
+++ b/packages/kbn-server-route-repository/src/test_types.ts
@@ -0,0 +1,238 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+import * as t from 'io-ts';
+import { createServerRouteRepository } from './create_server_route_repository';
+import { decodeRequestParams } from './decode_request_params';
+import { EndpointOf, ReturnOf, RouteRepositoryClient } from './typings';
+
+function assertType(value: TShape) {
+ return value;
+}
+
+// Generic arguments for createServerRouteRepository should be set,
+// if not, registering routes should not be allowed
+createServerRouteRepository().add({
+ // @ts-expect-error
+ endpoint: 'any_endpoint',
+ // @ts-expect-error
+ handler: async ({ params }) => {},
+});
+
+// If a params codec is not set, its type should not be available in
+// the request handler.
+createServerRouteRepository<{}, {}>().add({
+ endpoint: 'endpoint_without_params',
+ handler: async (resources) => {
+ // @ts-expect-error Argument of type '{}' is not assignable to parameter of type '{ params: any; }'.
+ assertType<{ params: any }>(resources);
+ },
+});
+
+// If a params codec is set, its type _should_ be available in the
+// request handler.
+createServerRouteRepository<{}, {}>().add({
+ endpoint: 'endpoint_with_params',
+ params: t.type({
+ path: t.type({
+ serviceName: t.string,
+ }),
+ }),
+ handler: async (resources) => {
+ assertType<{ params: { path: { serviceName: string } } }>(resources);
+ },
+});
+
+// Resources should be passed to the request handler.
+createServerRouteRepository<{ context: { getSpaceId: () => string } }, {}>().add({
+ endpoint: 'endpoint_with_params',
+ params: t.type({
+ path: t.type({
+ serviceName: t.string,
+ }),
+ }),
+ handler: async ({ context }) => {
+ const spaceId = context.getSpaceId();
+ assertType(spaceId);
+ },
+});
+
+// Create options are available when registering a route.
+createServerRouteRepository<{}, { options: { tags: string[] } }>().add({
+ endpoint: 'endpoint_with_params',
+ params: t.type({
+ path: t.type({
+ serviceName: t.string,
+ }),
+ }),
+ options: {
+ tags: [],
+ },
+ handler: async (resources) => {
+ assertType<{ params: { path: { serviceName: string } } }>(resources);
+ },
+});
+
+const repository = createServerRouteRepository<{}, {}>()
+ .add({
+ endpoint: 'endpoint_without_params',
+ handler: async () => {
+ return {
+ noParamsForMe: true,
+ };
+ },
+ })
+ .add({
+ endpoint: 'endpoint_with_params',
+ params: t.type({
+ path: t.type({
+ serviceName: t.string,
+ }),
+ }),
+ handler: async () => {
+ return {
+ yesParamsForMe: true,
+ };
+ },
+ })
+ .add({
+ endpoint: 'endpoint_with_optional_params',
+ params: t.partial({
+ query: t.partial({
+ serviceName: t.string,
+ }),
+ }),
+ handler: async () => {
+ return {
+ someParamsForMe: true,
+ };
+ },
+ });
+
+type TestRepository = typeof repository;
+
+// EndpointOf should return all valid endpoints of a repository
+
+assertType>>([
+ 'endpoint_with_params',
+ 'endpoint_without_params',
+ 'endpoint_with_optional_params',
+]);
+
+// @ts-expect-error Type '"this_endpoint_does_not_exist"' is not assignable to type '"endpoint_without_params" | "endpoint_with_params" | "endpoint_with_optional_params"'
+assertType>>(['this_endpoint_does_not_exist']);
+
+// ReturnOf should return the return type of a request handler.
+
+assertType>({
+ noParamsForMe: true,
+});
+
+const noParamsInvalid: ReturnOf = {
+ // @ts-expect-error type '{ paramsForMe: boolean; }' is not assignable to type '{ noParamsForMe: boolean; }'.
+ paramsForMe: true,
+};
+
+// RouteRepositoryClient
+
+type TestClient = RouteRepositoryClient;
+
+const client: TestClient = {} as any;
+
+// It should respect any additional create options.
+
+// @ts-expect-error Property 'timeout' is missing
+client({
+ endpoint: 'endpoint_without_params',
+});
+
+client({
+ endpoint: 'endpoint_without_params',
+ timeout: 1,
+});
+
+// It does not allow params for routes without a params codec
+client({
+ endpoint: 'endpoint_without_params',
+ // @ts-expect-error Object literal may only specify known properties, and 'params' does not exist in type
+ params: {},
+ timeout: 1,
+});
+
+// It requires params for routes with a params codec
+client({
+ endpoint: 'endpoint_with_params',
+ params: {
+ // @ts-expect-error property 'serviceName' is missing in type '{}'
+ path: {},
+ },
+ timeout: 1,
+});
+
+// Params are optional if the codec has no required keys
+client({
+ endpoint: 'endpoint_with_optional_params',
+ timeout: 1,
+});
+
+// If optional, an error will still occur if the params do not match
+client({
+ endpoint: 'endpoint_with_optional_params',
+ timeout: 1,
+ params: {
+ // @ts-expect-error Object literal may only specify known properties, and 'path' does not exist in type
+ path: '',
+ },
+});
+
+// The return type is correctly inferred
+client({
+ endpoint: 'endpoint_with_params',
+ params: {
+ path: {
+ serviceName: '',
+ },
+ },
+ timeout: 1,
+}).then((res) => {
+ assertType<{
+ noParamsForMe: boolean;
+ // @ts-expect-error Property 'noParamsForMe' is missing in type
+ }>(res);
+
+ assertType<{
+ yesParamsForMe: boolean;
+ }>(res);
+});
+
+// decodeRequestParams should return the type of the codec that is passed
+assertType<{ path: { serviceName: string } }>(
+ decodeRequestParams(
+ {
+ params: {
+ serviceName: 'serviceName',
+ },
+ body: undefined,
+ query: undefined,
+ },
+ t.type({ path: t.type({ serviceName: t.string }) })
+ )
+);
+
+assertType<{ path: { serviceName: boolean } }>(
+ // @ts-expect-error The types of 'path.serviceName' are incompatible between these types.
+ decodeRequestParams(
+ {
+ params: {
+ serviceName: 'serviceName',
+ },
+ body: undefined,
+ query: undefined,
+ },
+ t.type({ path: t.type({ serviceName: t.string }) })
+ )
+);
diff --git a/packages/kbn-server-route-repository/src/typings.ts b/packages/kbn-server-route-repository/src/typings.ts
new file mode 100644
index 0000000000000..c27f67c71e88b
--- /dev/null
+++ b/packages/kbn-server-route-repository/src/typings.ts
@@ -0,0 +1,192 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import * as t from 'io-ts';
+import { RequiredKeys } from 'utility-types';
+
+type MaybeOptional }> = RequiredKeys<
+ T['params']
+> extends never
+ ? { params?: T['params'] }
+ : { params: T['params'] };
+
+type WithoutIncompatibleMethods = Omit & {
+ encode: t.Encode;
+ asEncoder: () => t.Encoder;
+};
+
+export type RouteParamsRT = WithoutIncompatibleMethods<
+ t.Type<{
+ path?: any;
+ query?: any;
+ body?: any;
+ }>
+>;
+
+export interface RouteState {
+ [endpoint: string]: ServerRoute;
+}
+
+export type ServerRouteHandlerResources = Record;
+export type ServerRouteCreateOptions = Record;
+
+export type ServerRoute<
+ TEndpoint extends string,
+ TRouteParamsRT extends RouteParamsRT | undefined,
+ TRouteHandlerResources extends ServerRouteHandlerResources,
+ TReturnType,
+ TRouteCreateOptions extends ServerRouteCreateOptions
+> = {
+ endpoint: TEndpoint;
+ params?: TRouteParamsRT;
+ handler: ({}: TRouteHandlerResources &
+ (TRouteParamsRT extends RouteParamsRT
+ ? DecodedRequestParamsOfType
+ : {})) => Promise;
+} & TRouteCreateOptions;
+
+export interface ServerRouteRepository<
+ TRouteHandlerResources extends ServerRouteHandlerResources = ServerRouteHandlerResources,
+ TRouteCreateOptions extends ServerRouteCreateOptions = ServerRouteCreateOptions,
+ TRouteState extends RouteState = RouteState
+> {
+ add<
+ TEndpoint extends string,
+ TReturnType,
+ TRouteParamsRT extends RouteParamsRT | undefined = undefined
+ >(
+ route: ServerRoute<
+ TEndpoint,
+ TRouteParamsRT,
+ TRouteHandlerResources,
+ TReturnType,
+ TRouteCreateOptions
+ >
+ ): ServerRouteRepository<
+ TRouteHandlerResources,
+ TRouteCreateOptions,
+ TRouteState &
+ {
+ [key in TEndpoint]: ServerRoute<
+ TEndpoint,
+ TRouteParamsRT,
+ TRouteHandlerResources,
+ TReturnType,
+ TRouteCreateOptions
+ >;
+ }
+ >;
+ merge<
+ TServerRouteRepository extends ServerRouteRepository<
+ TRouteHandlerResources,
+ TRouteCreateOptions
+ >
+ >(
+ repository: TServerRouteRepository
+ ): TServerRouteRepository extends ServerRouteRepository<
+ TRouteHandlerResources,
+ TRouteCreateOptions,
+ infer TRouteStateToMerge
+ >
+ ? ServerRouteRepository<
+ TRouteHandlerResources,
+ TRouteCreateOptions,
+ TRouteState & TRouteStateToMerge
+ >
+ : never;
+ getRoutes: () => Array<
+ ServerRoute
+ >;
+}
+
+type ClientRequestParamsOfType<
+ TRouteParamsRT extends RouteParamsRT
+> = TRouteParamsRT extends t.Mixed
+ ? MaybeOptional<{
+ params: t.OutputOf;
+ }>
+ : {};
+
+type DecodedRequestParamsOfType<
+ TRouteParamsRT extends RouteParamsRT
+> = TRouteParamsRT extends t.Mixed
+ ? MaybeOptional<{
+ params: t.TypeOf;
+ }>
+ : {};
+
+export type EndpointOf<
+ TServerRouteRepository extends ServerRouteRepository
+> = TServerRouteRepository extends ServerRouteRepository
+ ? keyof TRouteState
+ : never;
+
+export type ReturnOf<
+ TServerRouteRepository extends ServerRouteRepository,
+ TEndpoint extends EndpointOf
+> = TServerRouteRepository extends ServerRouteRepository
+ ? TEndpoint extends keyof TRouteState
+ ? TRouteState[TEndpoint] extends ServerRoute<
+ any,
+ any,
+ any,
+ infer TReturnType,
+ ServerRouteCreateOptions
+ >
+ ? TReturnType
+ : never
+ : never
+ : never;
+
+export type DecodedRequestParamsOf<
+ TServerRouteRepository extends ServerRouteRepository,
+ TEndpoint extends EndpointOf
+> = TServerRouteRepository extends ServerRouteRepository
+ ? TEndpoint extends keyof TRouteState
+ ? TRouteState[TEndpoint] extends ServerRoute<
+ any,
+ infer TRouteParamsRT,
+ any,
+ any,
+ ServerRouteCreateOptions
+ >
+ ? TRouteParamsRT extends RouteParamsRT
+ ? DecodedRequestParamsOfType
+ : {}
+ : never
+ : never
+ : never;
+
+export type ClientRequestParamsOf<
+ TServerRouteRepository extends ServerRouteRepository,
+ TEndpoint extends EndpointOf
+> = TServerRouteRepository extends ServerRouteRepository
+ ? TEndpoint extends keyof TRouteState
+ ? TRouteState[TEndpoint] extends ServerRoute<
+ any,
+ infer TRouteParamsRT,
+ any,
+ any,
+ ServerRouteCreateOptions
+ >
+ ? TRouteParamsRT extends RouteParamsRT
+ ? ClientRequestParamsOfType
+ : {}
+ : never
+ : never
+ : never;
+
+export type RouteRepositoryClient<
+ TServerRouteRepository extends ServerRouteRepository,
+ TAdditionalClientOptions extends Record
+> = >(
+ options: {
+ endpoint: TEndpoint;
+ } & ClientRequestParamsOf &
+ TAdditionalClientOptions
+) => Promise>;
diff --git a/packages/kbn-server-route-repository/tsconfig.json b/packages/kbn-server-route-repository/tsconfig.json
new file mode 100644
index 0000000000000..8f1e72172c675
--- /dev/null
+++ b/packages/kbn-server-route-repository/tsconfig.json
@@ -0,0 +1,20 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "compilerOptions": {
+ "incremental": false,
+ "outDir": "./target",
+ "stripInternal": false,
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-server-route-repository/src",
+ "types": [
+ "jest",
+ "node"
+ ],
+ "noUnusedLocals": false
+ },
+ "include": [
+ "./src/**/*.ts"
+ ]
+}
diff --git a/packages/kbn-telemetry-tools/src/tools/tasks/index.ts b/packages/kbn-telemetry-tools/src/tools/tasks/index.ts
index 5d946b73d9759..f55a9aa80d40d 100644
--- a/packages/kbn-telemetry-tools/src/tools/tasks/index.ts
+++ b/packages/kbn-telemetry-tools/src/tools/tasks/index.ts
@@ -7,7 +7,9 @@
*/
export { ErrorReporter } from './error_reporter';
-export { TaskContext, createTaskContext } from './task_context';
+
+export type { TaskContext } from './task_context';
+export { createTaskContext } from './task_context';
export { parseConfigsTask } from './parse_configs_task';
export { extractCollectorsTask } from './extract_collectors_task';
diff --git a/packages/kbn-telemetry-tools/tsconfig.json b/packages/kbn-telemetry-tools/tsconfig.json
index 39946fe9907e5..419af1d02f83b 100644
--- a/packages/kbn-telemetry-tools/tsconfig.json
+++ b/packages/kbn-telemetry-tools/tsconfig.json
@@ -6,7 +6,8 @@
"declaration": true,
"declarationMap": true,
"sourceMap": true,
- "sourceRoot": "../../../../packages/kbn-telemetry-tools/src"
+ "sourceRoot": "../../../../packages/kbn-telemetry-tools/src",
+ "isolatedModules": true
},
"include": [
"src/**/*",
diff --git a/packages/kbn-ui-shared-deps/polyfills.js b/packages/kbn-ui-shared-deps/polyfills.js
index abbf911cfc8fc..a9ec32023f2bf 100644
--- a/packages/kbn-ui-shared-deps/polyfills.js
+++ b/packages/kbn-ui-shared-deps/polyfills.js
@@ -8,7 +8,6 @@
require('core-js/stable');
require('regenerator-runtime/runtime');
-require('custom-event-polyfill');
if (typeof window.Event === 'object') {
// IE11 doesn't support unknown event types, required by react-use
@@ -17,6 +16,4 @@ if (typeof window.Event === 'object') {
}
require('whatwg-fetch');
-require('abortcontroller-polyfill/dist/polyfill-patch-fetch');
-require('./vendor/childnode_remove_polyfill');
require('symbol-observable');
diff --git a/packages/kbn-ui-shared-deps/vendor/childnode_remove_polyfill.js b/packages/kbn-ui-shared-deps/vendor/childnode_remove_polyfill.js
deleted file mode 100644
index d8818fe809ccb..0000000000000
--- a/packages/kbn-ui-shared-deps/vendor/childnode_remove_polyfill.js
+++ /dev/null
@@ -1,48 +0,0 @@
-/* eslint-disable @kbn/eslint/require-license-header */
-
-/* @notice
- * This product bundles childnode-remove which is available under a
- * "MIT" license.
- *
- * The MIT License (MIT)
- *
- * Copyright (c) 2016-present, jszhou
- * https://github.com/jserz/js_piece
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-/* eslint-disable */
-
-(function (arr) {
- arr.forEach(function (item) {
- if (item.hasOwnProperty('remove')) {
- return;
- }
- Object.defineProperty(item, 'remove', {
- configurable: true,
- enumerable: true,
- writable: true,
- value: function remove() {
- if (this.parentNode !== null)
- this.parentNode.removeChild(this);
- }
- });
- });
-})([Element.prototype, CharacterData.prototype, DocumentType.prototype]);
diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts
index b179c998f1126..baf8ed2a61645 100644
--- a/src/core/public/doc_links/doc_links_service.ts
+++ b/src/core/public/doc_links/doc_links_service.ts
@@ -209,6 +209,7 @@ export class DocLinksService {
indexThreshold: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/rule-type-index-threshold.html`,
pagerDutyAction: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/pagerduty-action-type.html`,
preconfiguredConnectors: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/pre-configured-connectors.html`,
+ preconfiguredAlertHistoryConnector: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/index-action-type.html#preconfigured-connector-alert-history`,
serviceNowAction: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/servicenow-action-type.html#configuring-servicenow`,
setupPrerequisites: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/alerting-getting-started.html#alerting-setup-prerequisites`,
slackAction: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/slack-action-type.html#configuring-slack`,
diff --git a/src/core/server/config/ensure_valid_configuration.test.ts b/src/core/server/config/ensure_valid_configuration.test.ts
index 474e8dd59b4c4..f1006f93dbc2d 100644
--- a/src/core/server/config/ensure_valid_configuration.test.ts
+++ b/src/core/server/config/ensure_valid_configuration.test.ts
@@ -16,14 +16,40 @@ describe('ensureValidConfiguration', () => {
beforeEach(() => {
jest.clearAllMocks();
configService = configServiceMock.create();
- configService.getUsedPaths.mockReturnValue(Promise.resolve(['core', 'elastic']));
+
+ configService.validate.mockResolvedValue();
+ configService.getUsedPaths.mockReturnValue(Promise.resolve([]));
});
- it('returns normally when there is no unused keys', async () => {
- configService.getUnusedPaths.mockResolvedValue([]);
+  it('returns normally when there are no unused keys and when the config validates', async () => {
await expect(ensureValidConfiguration(configService as any)).resolves.toBeUndefined();
});
+ it('throws when config validation fails', async () => {
+ configService.validate.mockImplementation(() => {
+ throw new Error('some message');
+ });
+
+ await expect(ensureValidConfiguration(configService as any)).rejects.toMatchInlineSnapshot(
+ `[Error: some message]`
+ );
+ });
+
+ it('throws a `CriticalError` with the correct processExitCode value when config validation fails', async () => {
+ expect.assertions(2);
+
+ configService.validate.mockImplementation(() => {
+ throw new Error('some message');
+ });
+
+ try {
+ await ensureValidConfiguration(configService as any);
+ } catch (e) {
+ expect(e).toBeInstanceOf(CriticalError);
+ expect(e.processExitCode).toEqual(78);
+ }
+ });
+
it('throws when there are some unused keys', async () => {
configService.getUnusedPaths.mockResolvedValue(['some.key', 'some.other.key']);
@@ -44,4 +70,18 @@ describe('ensureValidConfiguration', () => {
expect(e.processExitCode).toEqual(64);
}
});
+
+ it('does not throw when all unused keys are included in the ignored paths', async () => {
+ configService.getUnusedPaths.mockResolvedValue(['dev.someDevKey', 'elastic.apm.enabled']);
+
+ await expect(ensureValidConfiguration(configService as any)).resolves.toBeUndefined();
+ });
+
+ it('throws when only some keys are included in the ignored paths', async () => {
+ configService.getUnusedPaths.mockResolvedValue(['dev.someDevKey', 'some.key']);
+
+ await expect(ensureValidConfiguration(configService as any)).rejects.toMatchInlineSnapshot(
+ `[Error: Unknown configuration key(s): "some.key". Check for spelling errors and ensure that expected plugins are installed.]`
+ );
+ });
});
diff --git a/src/core/server/config/ensure_valid_configuration.ts b/src/core/server/config/ensure_valid_configuration.ts
index a33625cc0841d..c7a4721b7d2ae 100644
--- a/src/core/server/config/ensure_valid_configuration.ts
+++ b/src/core/server/config/ensure_valid_configuration.ts
@@ -9,22 +9,27 @@
import { ConfigService } from '@kbn/config';
import { CriticalError } from '../errors';
+const ignoredPaths = ['dev.', 'elastic.apm.'];
+
+const invalidConfigExitCode = 78;
+const legacyInvalidConfigExitCode = 64;
+
export async function ensureValidConfiguration(configService: ConfigService) {
- await configService.validate();
+ try {
+ await configService.validate();
+ } catch (e) {
+ throw new CriticalError(e.message, 'InvalidConfig', invalidConfigExitCode, e);
+ }
- const unusedConfigKeys = await configService.getUnusedPaths();
+ const unusedPaths = await configService.getUnusedPaths();
+ const unusedConfigKeys = unusedPaths.filter((unusedPath) => {
+ return !ignoredPaths.some((ignoredPath) => unusedPath.startsWith(ignoredPath));
+ });
if (unusedConfigKeys.length > 0) {
const message = `Unknown configuration key(s): ${unusedConfigKeys
.map((key) => `"${key}"`)
.join(', ')}. Check for spelling errors and ensure that expected plugins are installed.`;
- throw new InvalidConfigurationError(message);
- }
-}
-
-class InvalidConfigurationError extends CriticalError {
- constructor(message: string) {
- super(message, 'InvalidConfig', 64);
- Object.setPrototypeOf(this, InvalidConfigurationError.prototype);
+ throw new CriticalError(message, 'InvalidConfig', legacyInvalidConfigExitCode);
}
}
diff --git a/src/core/server/server.ts b/src/core/server/server.ts
index b34d7fec3dcbf..45d11f9013fed 100644
--- a/src/core/server/server.ts
+++ b/src/core/server/server.ts
@@ -36,7 +36,6 @@ import { config as cspConfig } from './csp';
import { config as elasticsearchConfig } from './elasticsearch';
import { config as httpConfig } from './http';
import { config as loggingConfig } from './logging';
-import { config as devConfig } from './dev';
import { config as kibanaConfig } from './kibana_config';
import { savedObjectsConfig, savedObjectsMigrationConfig } from './saved_objects';
import { config as uiSettingsConfig } from './ui_settings';
@@ -303,7 +302,6 @@ export class Server {
loggingConfig,
httpConfig,
pluginsConfig,
- devConfig,
kibanaConfig,
savedObjectsConfig,
savedObjectsMigrationConfig,
diff --git a/src/core/server/ui_settings/settings/theme.ts b/src/core/server/ui_settings/settings/theme.ts
index cc2919f7555c2..4d2c45a9c84b0 100644
--- a/src/core/server/ui_settings/settings/theme.ts
+++ b/src/core/server/ui_settings/settings/theme.ts
@@ -48,6 +48,8 @@ export const getThemeSettings = (
): Record => {
const { availableVersions, defaultDarkMode, defaultVersion } = getThemeInfo(options);
+ const onlyOneThemeAvailable = !options?.isDist && availableVersions.length === 1;
+
return {
'theme:darkMode': {
name: i18n.translate('core.ui_settings.params.darkModeTitle', {
@@ -68,10 +70,21 @@ export const getThemeSettings = (
type: 'select',
options: availableVersions,
description: i18n.translate('core.ui_settings.params.themeVersionText', {
- defaultMessage: `Switch between the theme used for the current and next version of Kibana. A page refresh is required for the setting to be applied.`,
+ defaultMessage:
+ 'Switch between the theme used for the current and next version of Kibana. A page refresh is required for the setting to be applied. {lessOptions}',
+ values: {
+ lessOptions: onlyOneThemeAvailable
+            ? 'There is only one theme available, set KBN_OPTIMIZER_THEMES=v7light,v7dark,v8light,v8dark to get more options.'
+ : undefined,
+ },
}),
requiresPageReload: true,
schema: schema.oneOf(availableVersions.map((v) => schema.literal(v)) as [Type]),
+ optionLabels: onlyOneThemeAvailable
+ ? {
+ [availableVersions[0]]: `${availableVersions[0]} (only)`,
+ }
+ : undefined,
},
};
};
diff --git a/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker b/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
index 6cc94208fbcce..1ad1559288992 100755
--- a/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
+++ b/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
@@ -159,6 +159,7 @@ kibana_vars=(
xpack.actions.allowedHosts
xpack.actions.enabled
xpack.actions.enabledActionTypes
+ xpack.actions.preconfiguredAlertHistoryEsIndex
xpack.actions.preconfigured
xpack.actions.proxyHeaders
xpack.actions.proxyRejectUnauthorizedCertificates
diff --git a/src/plugins/dashboard/public/application/dashboard_app.tsx b/src/plugins/dashboard/public/application/dashboard_app.tsx
index 3d6f08f321977..e7e2ccfd46b9c 100644
--- a/src/plugins/dashboard/public/application/dashboard_app.tsx
+++ b/src/plugins/dashboard/public/application/dashboard_app.tsx
@@ -10,7 +10,7 @@ import { History } from 'history';
import { merge, Subject, Subscription } from 'rxjs';
import React, { useCallback, useEffect, useMemo, useState } from 'react';
-import { debounceTime, tap } from 'rxjs/operators';
+import { debounceTime, finalize, switchMap, tap } from 'rxjs/operators';
import { useKibana } from '../../../kibana_react/public';
import { DashboardConstants } from '../dashboard_constants';
import { DashboardTopNav } from './top_nav/dashboard_top_nav';
@@ -30,7 +30,7 @@ import {
useSavedDashboard,
} from './hooks';
-import { IndexPattern } from '../services/data';
+import { IndexPattern, waitUntilNextSessionCompletes$ } from '../services/data';
import { EmbeddableRenderer } from '../services/embeddable';
import { DashboardContainerInput } from '.';
import { leaveConfirmStrings } from '../dashboard_strings';
@@ -209,14 +209,26 @@ export function DashboardApp({
);
subscriptions.add(
- merge(
- data.query.timefilter.timefilter.getAutoRefreshFetch$(),
- searchSessionIdQuery$
- ).subscribe(() => {
+ searchSessionIdQuery$.subscribe(() => {
triggerRefresh$.next({ force: true });
})
);
+ subscriptions.add(
+ data.query.timefilter.timefilter
+ .getAutoRefreshFetch$()
+ .pipe(
+ tap(() => {
+ triggerRefresh$.next({ force: true });
+ }),
+ switchMap((done) =>
+ // best way on a dashboard to estimate that panels are updated is to rely on search session service state
+ waitUntilNextSessionCompletes$(data.search.session).pipe(finalize(done))
+ )
+ )
+ .subscribe()
+ );
+
dashboardStateManager.registerChangeListener(() => {
setUnsavedChanges(dashboardStateManager.getIsDirty(data.query.timefilter.timefilter));
// we aren't checking dirty state because there are changes the container needs to know about
diff --git a/src/plugins/data/README.mdx b/src/plugins/data/README.mdx
index 60e74a3fa126c..30006e2b497bd 100644
--- a/src/plugins/data/README.mdx
+++ b/src/plugins/data/README.mdx
@@ -5,7 +5,7 @@ title: Data services
image: https://source.unsplash.com/400x175/?Search
summary: The data plugin contains services for searching, querying and filtering.
date: 2020-12-02
-tags: ['kibana','dev', 'contributor', 'api docs']
+tags: ['kibana', 'dev', 'contributor', 'api docs']
---
# data
@@ -149,7 +149,6 @@ Index patterns provide Rest-like HTTP CRUD+ API with the following endpoints:
- Remove a scripted field — `DELETE /api/index_patterns/index_pattern/{id}/scripted_field/{name}`
- Update a scripted field — `POST /api/index_patterns/index_pattern/{id}/scripted_field/{name}`
-
### Index Patterns API
Index Patterns REST API allows you to create, retrieve and delete index patterns. I also
@@ -212,11 +211,10 @@ The endpoint returns the created index pattern object.
```json
{
- "index_pattern": {}
+ "index_pattern": {}
}
```
-
#### Fetch an index pattern by ID
Retrieve an index pattern by its ID.
@@ -229,23 +227,22 @@ Returns an index pattern object.
```json
{
- "index_pattern": {
- "id": "...",
- "version": "...",
- "title": "...",
- "type": "...",
- "intervalName": "...",
- "timeFieldName": "...",
- "sourceFilters": [],
- "fields": {},
- "typeMeta": {},
- "fieldFormats": {},
- "fieldAttrs": {}
- }
+ "index_pattern": {
+ "id": "...",
+ "version": "...",
+ "title": "...",
+ "type": "...",
+ "intervalName": "...",
+ "timeFieldName": "...",
+ "sourceFilters": [],
+ "fields": {},
+ "typeMeta": {},
+ "fieldFormats": {},
+ "fieldAttrs": {}
+ }
}
```
-
#### Delete an index pattern by ID
Delete an index pattern by its ID.
@@ -256,21 +253,21 @@ DELETE /api/index_patterns/index_pattern/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
Returns a `200 OK` response with an empty body on success.
-
#### Partially update an index pattern by ID
Update part of an index pattern. Only provided fields will be updated on the
index pattern, missing fields will stay as they are persisted.
These fields can be update partially:
- - `title`
- - `timeFieldName`
- - `intervalName`
- - `fields` (optionally refresh fields)
- - `sourceFilters`
- - `fieldFormatMap`
- - `type`
- - `typeMeta`
+
+- `title`
+- `timeFieldName`
+- `intervalName`
+- `fields` (optionally refresh fields)
+- `sourceFilters`
+- `fieldFormatMap`
+- `type`
+- `typeMeta`
Update a title of an index pattern.
@@ -318,18 +315,14 @@ This endpoint returns the updated index pattern object.
```json
{
- "index_pattern": {
-
- }
+ "index_pattern": {}
}
```
-
### Fields API
Fields API allows to change field metadata, such as `count`, `customLabel`, and `format`.
-
#### Update fields
Update endpoint allows you to update fields presentation metadata, such as `count`,
@@ -383,13 +376,10 @@ This endpoint returns the updated index pattern object.
```json
{
- "index_pattern": {
-
- }
+ "index_pattern": {}
}
```
-
### Scripted Fields API
Scripted Fields API provides CRUD API for scripted fields of an index pattern.
@@ -487,7 +477,7 @@ Returns the field object.
```json
{
- "field": {}
+ "field": {}
}
```
@@ -529,47 +519,86 @@ POST /api/index_patterns/index_pattern/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/scri
}
```
-
## Query
The query service is responsible for managing the configuration of a search query (`QueryState`): filters, time range, query string, and settings such as the auto refresh behavior and saved queries.
It contains sub-services for each of those configurations:
- - `data.query.filterManager` - Manages the `filters` component of a `QueryState`. The global filter state (filters that are persisted between applications) are owned by this service.
- - `data.query.timefilter` - Responsible for the time range filter and the auto refresh behavior settings.
- - `data.query.queryString` - Responsible for the query string and query language settings.
- - `data.query.savedQueries` - Responsible for persisting a `QueryState` into a `SavedObject`, so it can be restored and used by other applications.
- Any changes to the `QueryState` are published on the `data.query.state$`, which is useful when wanting to persist global state or run a search upon data changes.
+- `data.query.filterManager` - Manages the `filters` component of a `QueryState`. The global filter state (filters that are persisted between applications) are owned by this service.
+- `data.query.timefilter` - Responsible for the time range filter and the auto refresh behavior settings.
+- `data.query.queryString` - Responsible for the query string and query language settings.
+- `data.query.savedQueries` - Responsible for persisting a `QueryState` into a `SavedObject`, so it can be restored and used by other applications.
- A simple use case is:
+Any changes to the `QueryState` are published on the `data.query.state$`, which is useful when wanting to persist global state or run a search upon data changes.
- ```.ts
- function searchOnChange(indexPattern: IndexPattern, aggConfigs: AggConfigs) {
- data.query.state$.subscribe(() => {
+A simple use case is:
- // Constuct the query portion of the search request
- const query = data.query.getEsQuery(indexPattern);
+```.ts
+function searchOnChange(indexPattern: IndexPattern, aggConfigs: AggConfigs) {
+ data.query.state$.subscribe(() => {
+
+    // Construct the query portion of the search request
+ const query = data.query.getEsQuery(indexPattern);
+
+ // Construct a request
+ const request = {
+ params: {
+ index: indexPattern.title,
+ body: {
+ aggs: aggConfigs.toDsl(),
+ query,
+ },
+ },
+ };
+
+ // Search with the `data.query` config
+ const search$ = data.search.search(request);
+
+ ...
+ });
+}
- // Construct a request
- const request = {
- params: {
- index: indexPattern.title,
- body: {
- aggs: aggConfigs.toDsl(),
- query,
- },
- },
- };
+```
- // Search with the `data.query` config
- const search$ = data.search.search(request);
+### Timefilter
- ...
- });
- }
+`data.query.timefilter` is responsible for the time range filter and the auto refresh behavior settings.
+
+#### Autorefresh
- ```
+Timefilter provides an API for setting and getting current auto refresh state:
+
+```ts
+const { pause, value } = data.query.timefilter.timefilter.getRefreshInterval();
+
+data.query.timefilter.timefilter.setRefreshInterval({ pause: false, value: 5000 }); // start auto refresh with a 5 second interval
+```
+
+Timefilter API also provides an `autoRefreshFetch$` observable that apps should use to get notified
+when it is time to refresh data because of auto refresh.
+This API expects apps to confirm when they are done with reloading the data.
+The confirmation mechanism is needed to prevent an excessive queue of fetches.
+
+```ts
+import { refetchData } from '../my-app'
+
+const autoRefreshFetch$ = data.query.timefilter.timefilter.getAutoRefreshFetch$()
+const subscription = autoRefreshFetch$.subscribe(async (done) => {
+  try {
+    await refetchData();
+  } finally {
+    // confirm that data fetching was finished
+    done();
+  }
+})
+
+function unmount() {
+  // don't forget to unsubscribe when leaving the app
+  subscription.unsubscribe()
+}
+```
## Search
diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts
index 805eccd1ee31b..04d2785137719 100644
--- a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts
+++ b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts
@@ -535,6 +535,9 @@ export class IndexPatternsService {
});
indexPattern.id = response.id;
this.indexPatternCache.set(indexPattern.id, Promise.resolve(indexPattern));
+ if (this.savedObjectsCache) {
+ this.savedObjectsCache.push(response as SavedObject);
+ }
return indexPattern;
}
diff --git a/src/plugins/data/common/kbn_field_types/types.ts b/src/plugins/data/common/kbn_field_types/types.ts
index c46e5c5266f55..e6f815e058ce3 100644
--- a/src/plugins/data/common/kbn_field_types/types.ts
+++ b/src/plugins/data/common/kbn_field_types/types.ts
@@ -80,4 +80,5 @@ export enum KBN_FIELD_TYPES {
OBJECT = 'object',
NESTED = 'nested',
HISTOGRAM = 'histogram',
+ MISSING = 'missing',
}
diff --git a/src/plugins/data/common/search/aggs/agg_configs.test.ts b/src/plugins/data/common/search/aggs/agg_configs.test.ts
index 297af560081b1..3ce528e6ed893 100644
--- a/src/plugins/data/common/search/aggs/agg_configs.test.ts
+++ b/src/plugins/data/common/search/aggs/agg_configs.test.ts
@@ -230,7 +230,7 @@ describe('AggConfigs', () => {
describe('#toDsl', () => {
beforeEach(() => {
indexPattern = stubIndexPattern as IndexPattern;
- indexPattern.fields.getByName = (name) => (name as unknown) as IndexPatternField;
+ indexPattern.fields.getByName = (name) => (({ name } as unknown) as IndexPatternField);
});
it('uses the sorted aggs', () => {
diff --git a/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.test.ts b/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.test.ts
index 4e278d5872a3e..56e720d237c45 100644
--- a/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.test.ts
+++ b/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.test.ts
@@ -16,16 +16,33 @@ import { AggConfigs, CreateAggConfigParams } from '../agg_configs';
import { BUCKET_TYPES } from './bucket_agg_types';
import { IBucketAggConfig } from './bucket_agg_type';
import { mockAggTypesRegistry } from '../test_helpers';
+import type { IndexPatternField } from '../../../index_patterns';
+import { IndexPattern } from '../../../index_patterns/index_patterns/index_pattern';
const indexPattern = {
id: '1234',
title: 'logstash-*',
fields: [
{
- name: 'field',
+ name: 'machine.os.raw',
+ type: 'string',
+ esTypes: ['string'],
+ aggregatable: true,
+ filterable: true,
+ searchable: true,
+ },
+ {
+ name: 'geo.src',
+ type: 'string',
+ esTypes: ['string'],
+ aggregatable: true,
+ filterable: true,
+ searchable: true,
},
],
-} as any;
+} as IndexPattern;
+
+indexPattern.fields.getByName = (name) => (({ name } as unknown) as IndexPatternField);
const singleTerm = {
aggs: [
diff --git a/src/plugins/data/common/search/aggs/buckets/terms.test.ts b/src/plugins/data/common/search/aggs/buckets/terms.test.ts
index bb34d7ede453c..09dfbb28a4e53 100644
--- a/src/plugins/data/common/search/aggs/buckets/terms.test.ts
+++ b/src/plugins/data/common/search/aggs/buckets/terms.test.ts
@@ -10,6 +10,8 @@ import { AggConfigs } from '../agg_configs';
import { METRIC_TYPES } from '../metrics';
import { mockAggTypesRegistry } from '../test_helpers';
import { BUCKET_TYPES } from './bucket_agg_types';
+import type { IndexPatternField } from '../../../index_patterns';
+import { IndexPattern } from '../../../index_patterns/index_patterns/index_pattern';
describe('Terms Agg', () => {
describe('order agg editor UI', () => {
@@ -17,16 +19,44 @@ describe('Terms Agg', () => {
const indexPattern = {
id: '1234',
title: 'logstash-*',
- fields: {
- getByName: () => field,
- filter: () => [field],
- },
- } as any;
+ fields: [
+ {
+ name: 'field',
+ type: 'string',
+ esTypes: ['string'],
+ aggregatable: true,
+ filterable: true,
+ searchable: true,
+ },
+ {
+ name: 'string_field',
+ type: 'string',
+ esTypes: ['string'],
+ aggregatable: true,
+ filterable: true,
+ searchable: true,
+ },
+ {
+ name: 'empty_number_field',
+ type: 'number',
+ esTypes: ['number'],
+ aggregatable: true,
+ filterable: true,
+ searchable: true,
+ },
+ {
+ name: 'number_field',
+ type: 'number',
+ esTypes: ['number'],
+ aggregatable: true,
+ filterable: true,
+ searchable: true,
+ },
+ ],
+ } as IndexPattern;
- const field = {
- name: 'field',
- indexPattern,
- };
+ indexPattern.fields.getByName = (name) => (({ name } as unknown) as IndexPatternField);
+ indexPattern.fields.filter = () => indexPattern.fields;
return new AggConfigs(
indexPattern,
@@ -207,16 +237,28 @@ describe('Terms Agg', () => {
const indexPattern = {
id: '1234',
title: 'logstash-*',
- fields: {
- getByName: () => field,
- filter: () => [field],
- },
- } as any;
+ fields: [
+ {
+ name: 'string_field',
+ type: 'string',
+ esTypes: ['string'],
+ aggregatable: true,
+ filterable: true,
+ searchable: true,
+ },
+ {
+ name: 'number_field',
+ type: 'number',
+ esTypes: ['number'],
+ aggregatable: true,
+ filterable: true,
+ searchable: true,
+ },
+ ],
+ } as IndexPattern;
- const field = {
- name: 'field',
- indexPattern,
- };
+ indexPattern.fields.getByName = (name) => (({ name } as unknown) as IndexPatternField);
+ indexPattern.fields.filter = () => indexPattern.fields;
const aggConfigs = new AggConfigs(
indexPattern,
diff --git a/src/plugins/data/common/search/aggs/buckets/terms.ts b/src/plugins/data/common/search/aggs/buckets/terms.ts
index 7d37dc83405b8..77c9c6e391c0a 100644
--- a/src/plugins/data/common/search/aggs/buckets/terms.ts
+++ b/src/plugins/data/common/search/aggs/buckets/terms.ts
@@ -8,7 +8,6 @@
import { noop } from 'lodash';
import { i18n } from '@kbn/i18n';
-import type { RequestAdapter } from 'src/plugins/inspector/common';
import { BucketAggType, IBucketAggConfig } from './bucket_agg_type';
import { BUCKET_TYPES } from './bucket_agg_types';
@@ -21,7 +20,6 @@ import { aggTermsFnName } from './terms_fn';
import { AggConfigSerialized, BaseAggParams } from '../types';
import { KBN_FIELD_TYPES } from '../../../../common';
-import { getRequestInspectorStats, getResponseInspectorStats } from '../../expressions';
import {
buildOtherBucketAgg,
@@ -103,36 +101,28 @@ export const getTermsBucketAgg = () =>
nestedSearchSource.setField('aggs', filterAgg);
- let request: ReturnType | undefined;
- if (inspectorRequestAdapter) {
- request = inspectorRequestAdapter.start(
- i18n.translate('data.search.aggs.buckets.terms.otherBucketTitle', {
- defaultMessage: 'Other bucket',
+ const requestResponder = inspectorRequestAdapter?.start(
+ i18n.translate('data.search.aggs.buckets.terms.otherBucketTitle', {
+ defaultMessage: 'Other bucket',
+ }),
+ {
+ description: i18n.translate('data.search.aggs.buckets.terms.otherBucketDescription', {
+ defaultMessage:
+ 'This request counts the number of documents that fall ' +
+ 'outside the criterion of the data buckets.',
}),
- {
- description: i18n.translate('data.search.aggs.buckets.terms.otherBucketDescription', {
- defaultMessage:
- 'This request counts the number of documents that fall ' +
- 'outside the criterion of the data buckets.',
- }),
- searchSessionId,
- }
- );
- nestedSearchSource.getSearchRequestBody().then((body) => {
- request!.json(body);
- });
- request.stats(getRequestInspectorStats(nestedSearchSource));
- }
+ searchSessionId,
+ }
+ );
+
+ const response = await nestedSearchSource
+ .fetch$({
+ abortSignal,
+ sessionId: searchSessionId,
+ requestResponder,
+ })
+ .toPromise();
- const response = await nestedSearchSource.fetch({
- abortSignal,
- sessionId: searchSessionId,
- });
- if (request) {
- request
- .stats(getResponseInspectorStats(response, nestedSearchSource))
- .ok({ json: response });
- }
resp = mergeOtherBucketAggResponse(aggConfigs, resp, response, aggConfig, filterAgg());
}
if (aggConfig.params.missingBucket) {
diff --git a/src/plugins/data/common/search/aggs/param_types/field.ts b/src/plugins/data/common/search/aggs/param_types/field.ts
index 2d3ff8f5fdba8..62dac9831211a 100644
--- a/src/plugins/data/common/search/aggs/param_types/field.ts
+++ b/src/plugins/data/common/search/aggs/param_types/field.ts
@@ -8,7 +8,10 @@
import { i18n } from '@kbn/i18n';
import { IAggConfig } from '../agg_config';
-import { SavedObjectNotFound } from '../../../../../../plugins/kibana_utils/common';
+import {
+ SavedFieldNotFound,
+ SavedFieldTypeInvalidForAgg,
+} from '../../../../../../plugins/kibana_utils/common';
import { BaseParamType } from './base';
import { propFilter } from '../utils';
import { KBN_FIELD_TYPES } from '../../../kbn_field_types/types';
@@ -47,13 +50,49 @@ export class FieldParamType extends BaseParamType {
);
}
- if (field.scripted) {
+ if (field.type === KBN_FIELD_TYPES.MISSING) {
+ throw new SavedFieldNotFound(
+ i18n.translate(
+ 'data.search.aggs.paramTypes.field.notFoundSavedFieldParameterErrorMessage',
+ {
+ defaultMessage:
+ 'The field "{fieldParameter}" associated with this object no longer exists in the index pattern. Please use another field.',
+ values: {
+ fieldParameter: field.name,
+ },
+ }
+ )
+ );
+ }
+
+ const validField = this.getAvailableFields(aggConfig).find(
+ (f: any) => f.name === field.name
+ );
+
+ if (!validField) {
+ throw new SavedFieldTypeInvalidForAgg(
+ i18n.translate(
+ 'data.search.aggs.paramTypes.field.invalidSavedFieldParameterErrorMessage',
+ {
+ defaultMessage:
+ 'Saved field "{fieldParameter}" of index pattern "{indexPatternTitle}" is invalid for use with the "{aggType}" aggregation. Please select a new field.',
+ values: {
+ fieldParameter: field.name,
+ aggType: aggConfig?.type?.title,
+ indexPatternTitle: aggConfig.getIndexPattern().title,
+ },
+ }
+ )
+ );
+ }
+
+ if (validField.scripted) {
output.params.script = {
- source: field.script,
- lang: field.lang,
+ source: validField.script,
+ lang: validField.lang,
};
} else {
- output.params.field = field.name;
+ output.params.field = validField.name;
}
};
}
@@ -69,28 +108,15 @@ export class FieldParamType extends BaseParamType {
const field = aggConfig.getIndexPattern().fields.getByName(fieldName);
if (!field) {
- throw new SavedObjectNotFound('index-pattern-field', fieldName);
- }
-
- const validField = this.getAvailableFields(aggConfig).find((f: any) => f.name === fieldName);
- if (!validField) {
- throw new Error(
- i18n.translate(
- 'data.search.aggs.paramTypes.field.invalidSavedFieldParameterErrorMessage',
- {
- defaultMessage:
- 'Saved field "{fieldParameter}" of index pattern "{indexPatternTitle}" is invalid for use with the "{aggType}" aggregation. Please select a new field.',
- values: {
- fieldParameter: fieldName,
- aggType: aggConfig?.type?.title,
- indexPatternTitle: aggConfig.getIndexPattern().title,
- },
- }
- )
- );
+ return new IndexPatternField({
+ type: KBN_FIELD_TYPES.MISSING,
+ name: fieldName,
+ searchable: false,
+ aggregatable: false,
+ });
}
- return validField;
+ return field;
};
}
diff --git a/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts b/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts
index 7580032b0dd85..c2566535916a8 100644
--- a/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts
+++ b/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts
@@ -133,7 +133,7 @@ describe('esaggs expression function - public', () => {
test('calls searchSource.fetch', async () => {
await handleRequest(mockParams);
const searchSource = await mockParams.searchSourceService.create();
- expect(searchSource.fetch).toHaveBeenCalledWith({
+ expect(searchSource.fetch$).toHaveBeenCalledWith({
abortSignal: mockParams.abortSignal,
sessionId: mockParams.searchSessionId,
});
diff --git a/src/plugins/data/common/search/expressions/esaggs/request_handler.ts b/src/plugins/data/common/search/expressions/esaggs/request_handler.ts
index 72d9cc4095570..5620698a47538 100644
--- a/src/plugins/data/common/search/expressions/esaggs/request_handler.ts
+++ b/src/plugins/data/common/search/expressions/esaggs/request_handler.ts
@@ -22,7 +22,6 @@ import {
import { IAggConfigs } from '../../aggs';
import { ISearchStartSearchSource } from '../../search_source';
import { tabifyAggResponse } from '../../tabify';
-import { getRequestInspectorStats, getResponseInspectorStats } from '../utils';
/** @internal */
export interface RequestHandlerParams {
@@ -41,6 +40,21 @@ export interface RequestHandlerParams {
getNow?: () => Date;
}
+function getRequestMainResponder(inspectorAdapters: Adapters, searchSessionId?: string) {
+ return inspectorAdapters.requests?.start(
+ i18n.translate('data.functions.esaggs.inspector.dataRequest.title', {
+ defaultMessage: 'Data',
+ }),
+ {
+ description: i18n.translate('data.functions.esaggs.inspector.dataRequest.description', {
+ defaultMessage:
+ 'This request queries Elasticsearch to fetch the data for the visualization.',
+ }),
+ searchSessionId,
+ }
+ );
+}
+
export const handleRequest = async ({
abortSignal,
aggs,
@@ -113,52 +127,19 @@ export const handleRequest = async ({
requestSearchSource.setField('filter', filters);
requestSearchSource.setField('query', query);
- let request;
- if (inspectorAdapters.requests) {
- inspectorAdapters.requests.reset();
- request = inspectorAdapters.requests.start(
- i18n.translate('data.functions.esaggs.inspector.dataRequest.title', {
- defaultMessage: 'Data',
- }),
- {
- description: i18n.translate('data.functions.esaggs.inspector.dataRequest.description', {
- defaultMessage:
- 'This request queries Elasticsearch to fetch the data for the visualization.',
- }),
- searchSessionId,
- }
- );
- request.stats(getRequestInspectorStats(requestSearchSource));
- }
-
- try {
- const response = await requestSearchSource.fetch({
- abortSignal,
- sessionId: searchSessionId,
- });
-
- if (request) {
- request.stats(getResponseInspectorStats(response, searchSource)).ok({ json: response });
- }
+ inspectorAdapters.requests?.reset();
+ const requestResponder = getRequestMainResponder(inspectorAdapters, searchSessionId);
- (searchSource as any).rawResponse = response;
- } catch (e) {
- // Log any error during request to the inspector
- if (request) {
- request.error({ json: e });
- }
- throw e;
- } finally {
- // Add the request body no matter if things went fine or not
- if (request) {
- request.json(await requestSearchSource.getSearchRequestBody());
- }
- }
+ const response$ = await requestSearchSource.fetch$({
+ abortSignal,
+ sessionId: searchSessionId,
+ requestResponder,
+ });
// Note that rawResponse is not deeply cloned here, so downstream applications using courier
// must take care not to mutate it, or it could have unintended side effects, e.g. displaying
// response data incorrectly in the inspector.
- let response = (searchSource as any).rawResponse;
+ let response = await response$.toPromise();
for (const agg of aggs.aggs) {
if (agg.enabled && typeof agg.type.postFlightRequest === 'function') {
response = await agg.type.postFlightRequest(
diff --git a/src/plugins/data/common/search/expressions/utils/index.ts b/src/plugins/data/common/search/expressions/utils/index.ts
index 2fa54d47445b3..a6ea8da6ac6e9 100644
--- a/src/plugins/data/common/search/expressions/utils/index.ts
+++ b/src/plugins/data/common/search/expressions/utils/index.ts
@@ -6,5 +6,4 @@
* Side Public License, v 1.
*/
-export * from './courier_inspector_stats';
export * from './function_wrapper';
diff --git a/src/plugins/data/common/search/search_source/index.ts b/src/plugins/data/common/search/search_source/index.ts
index 1cb04075dad7a..757e0de6ecb49 100644
--- a/src/plugins/data/common/search/search_source/index.ts
+++ b/src/plugins/data/common/search/search_source/index.ts
@@ -10,6 +10,7 @@ export { createSearchSource } from './create_search_source';
export { injectReferences } from './inject_references';
export { extractReferences } from './extract_references';
export { parseSearchSourceJSON } from './parse_json';
+export { getResponseInspectorStats } from './inspect';
export * from './fetch';
export * from './legacy';
export * from './search_source';
diff --git a/src/core/server/dev/index.ts b/src/plugins/data/common/search/search_source/inspect/index.ts
similarity index 90%
rename from src/core/server/dev/index.ts
rename to src/plugins/data/common/search/search_source/inspect/index.ts
index 70257d2a5e6c5..d5947f8a18cc9 100644
--- a/src/core/server/dev/index.ts
+++ b/src/plugins/data/common/search/search_source/inspect/index.ts
@@ -6,4 +6,4 @@
* Side Public License, v 1.
*/
-export { config } from './dev_config';
+export * from './inspector_stats';
diff --git a/src/plugins/data/common/search/expressions/utils/courier_inspector_stats.ts b/src/plugins/data/common/search/search_source/inspect/inspector_stats.ts
similarity index 97%
rename from src/plugins/data/common/search/expressions/utils/courier_inspector_stats.ts
rename to src/plugins/data/common/search/search_source/inspect/inspector_stats.ts
index 99acbce8935c4..24507a7e13058 100644
--- a/src/plugins/data/common/search/expressions/utils/courier_inspector_stats.ts
+++ b/src/plugins/data/common/search/search_source/inspect/inspector_stats.ts
@@ -15,8 +15,8 @@
import { i18n } from '@kbn/i18n';
import type { estypes } from '@elastic/elasticsearch';
-import { ISearchSource } from 'src/plugins/data/public';
-import { RequestStatistics } from 'src/plugins/inspector/common';
+import type { ISearchSource } from 'src/plugins/data/public';
+import type { RequestStatistics } from 'src/plugins/inspector/common';
/** @public */
export function getRequestInspectorStats(searchSource: ISearchSource) {
diff --git a/src/plugins/data/common/search/search_source/search_source.test.ts b/src/plugins/data/common/search/search_source/search_source.test.ts
index fd97a3d3381a9..3726e5d0c33e8 100644
--- a/src/plugins/data/common/search/search_source/search_source.test.ts
+++ b/src/plugins/data/common/search/search_source/search_source.test.ts
@@ -125,7 +125,7 @@ describe('SearchSource', () => {
}),
} as unknown) as IndexPattern);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.stored_fields).toEqual(['hello']);
expect(request.script_fields).toEqual({ world: {} });
expect(request.fields).toEqual(['@timestamp']);
@@ -144,7 +144,7 @@ describe('SearchSource', () => {
searchSource.setField('fields', ['@timestamp']);
searchSource.setField('fieldsFromSource', ['foo']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request).not.toHaveProperty('docvalue_fields');
});
@@ -160,7 +160,7 @@ describe('SearchSource', () => {
// @ts-expect-error TS won't like using this field name, but technically it's possible.
searchSource.setField('docvalue_fields', ['world']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request).toHaveProperty('docvalue_fields');
expect(request.docvalue_fields).toEqual(['world']);
});
@@ -179,7 +179,7 @@ describe('SearchSource', () => {
searchSource.setField('fields', ['c']);
searchSource.setField('fieldsFromSource', ['a', 'b', 'd']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request).toHaveProperty('docvalue_fields');
expect(request._source.includes).toEqual(['c', 'a', 'b', 'd']);
expect(request.docvalue_fields).toEqual([{ field: 'b', format: 'date_time' }]);
@@ -202,7 +202,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', [{ field: 'hello', format: 'strict_date_time' }]);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request).toHaveProperty('fields');
expect(request.fields).toEqual([{ field: 'hello', format: 'strict_date_time' }]);
});
@@ -218,7 +218,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', ['hello']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request).toHaveProperty('fields');
expect(request.fields).toEqual([{ field: 'hello', format: 'date_time' }]);
});
@@ -239,7 +239,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', [{ field: 'hello', a: 'a', c: 'c' }]);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request).toHaveProperty('fields');
expect(request.fields).toEqual([
{ field: 'hello', format: 'date_time', a: 'a', b: 'test', c: 'c' },
@@ -258,7 +258,7 @@ describe('SearchSource', () => {
// @ts-expect-error TS won't like using this field name, but technically it's possible.
searchSource.setField('script_fields', { world: {} });
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request).toHaveProperty('script_fields');
expect(request.script_fields).toEqual({
hello: {},
@@ -277,7 +277,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', ['hello', 'a', { field: 'c' }]);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.script_fields).toEqual({ hello: {} });
expect(request.stored_fields).toEqual(['a', 'c']);
});
@@ -293,7 +293,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', ['hello', 'a', { foo: 'c' }]);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.script_fields).toEqual({ hello: {} });
expect(request.stored_fields).toEqual(['a']);
});
@@ -309,23 +309,23 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fieldsFromSource', ['hello', 'a']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.script_fields).toEqual({ hello: {} });
expect(request.stored_fields).toEqual(['a']);
});
test('defaults to * for stored fields when no fields are provided', async () => {
- const requestA = await searchSource.getSearchRequestBody();
+ const requestA = searchSource.getSearchRequestBody();
expect(requestA.stored_fields).toEqual(['*']);
searchSource.setField('fields', ['*']);
- const requestB = await searchSource.getSearchRequestBody();
+ const requestB = searchSource.getSearchRequestBody();
expect(requestB.stored_fields).toEqual(['*']);
});
test('defaults to * for stored fields when no fields are provided with fieldsFromSource', async () => {
searchSource.setField('fieldsFromSource', ['*']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.stored_fields).toEqual(['*']);
});
});
@@ -343,7 +343,7 @@ describe('SearchSource', () => {
// @ts-expect-error Typings for excludes filters need to be fixed.
searchSource.setField('source', { excludes: ['exclude-*'] });
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual(['@timestamp']);
});
@@ -357,7 +357,7 @@ describe('SearchSource', () => {
}),
} as unknown) as IndexPattern);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual(['@timestamp']);
});
@@ -372,7 +372,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', ['hello']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.script_fields).toEqual({ hello: {} });
});
@@ -387,7 +387,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', ['hello', 'foo']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual(['hello']);
});
@@ -402,7 +402,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', ['*']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual([{ field: 'field1' }, { field: 'field2' }]);
});
@@ -417,7 +417,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', [{ field: '*', include_unmapped: 'true' }]);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual([{ field: 'field1' }, { field: 'field2' }]);
});
@@ -432,7 +432,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', ['timestamp', '*']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.script_fields).toEqual({ hello: {}, world: {} });
});
});
@@ -455,7 +455,7 @@ describe('SearchSource', () => {
'bar-b',
]);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request._source).toEqual({
includes: ['@timestamp', 'bar-b'],
});
@@ -473,7 +473,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', ['hello', '@timestamp', 'foo-a', 'bar']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual(['hello', '@timestamp', 'bar', 'date']);
expect(request.script_fields).toEqual({ hello: {} });
expect(request.stored_fields).toEqual(['@timestamp', 'bar']);
@@ -498,7 +498,7 @@ describe('SearchSource', () => {
'runtime_field',
]);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request._source).toEqual({
includes: ['@timestamp', 'bar'],
});
@@ -520,7 +520,7 @@ describe('SearchSource', () => {
searchSource.setField('fields', ['hello', '@timestamp', 'foo-a', 'bar']);
searchSource.setField('fieldsFromSource', ['foo-b', 'date', 'baz']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request._source).toEqual({
includes: ['@timestamp', 'bar', 'date', 'baz'],
});
@@ -546,7 +546,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', ['*']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual([
'*',
{ field: '@timestamp', format: 'strict_date_optional_time_nanos' },
@@ -574,7 +574,7 @@ describe('SearchSource', () => {
} as unknown) as IndexPattern);
searchSource.setField('fields', ['*']);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual([
{ field: 'foo-bar' },
{ field: 'field1' },
@@ -592,14 +592,14 @@ describe('SearchSource', () => {
expect(searchSource.getField('source')).toBe(undefined);
searchSource.setField('index', indexPattern);
expect(searchSource.getField('index')).toBe(indexPattern);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request._source).toBe(mockSource);
});
test('removes created searchSource filter on removal', async () => {
searchSource.setField('index', indexPattern);
searchSource.setField('index', undefined);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request._source).toBe(undefined);
});
});
@@ -609,7 +609,7 @@ describe('SearchSource', () => {
searchSource.setField('index', indexPattern);
searchSource.setField('index', indexPattern2);
expect(searchSource.getField('index')).toBe(indexPattern2);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request._source).toBe(mockSource2);
});
@@ -617,7 +617,7 @@ describe('SearchSource', () => {
searchSource.setField('index', indexPattern);
searchSource.setField('index', indexPattern2);
searchSource.setField('index', undefined);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request._source).toBe(undefined);
});
});
@@ -808,7 +808,7 @@ describe('SearchSource', () => {
docvalueFields: [],
}),
} as unknown) as IndexPattern);
- const request = await searchSource.getSearchRequestBody();
+ const request = searchSource.getSearchRequestBody();
expect(request.stored_fields).toEqual(['geometry', 'prop1']);
expect(request.docvalue_fields).toEqual(['prop1']);
expect(request._source).toEqual(['geometry']);
diff --git a/src/plugins/data/common/search/search_source/search_source.ts b/src/plugins/data/common/search/search_source/search_source.ts
index f11e7f06b6ab9..e1e7a8292d677 100644
--- a/src/plugins/data/common/search/search_source/search_source.ts
+++ b/src/plugins/data/common/search/search_source/search_source.ts
@@ -60,7 +60,7 @@
import { setWith } from '@elastic/safer-lodash-set';
import { uniqueId, keyBy, pick, difference, isFunction, isEqual, uniqWith, isObject } from 'lodash';
-import { map, switchMap, tap } from 'rxjs/operators';
+import { catchError, finalize, map, switchMap, tap } from 'rxjs/operators';
import { defer, from } from 'rxjs';
import { normalizeSortRequest } from './normalize_sort_request';
import { fieldWildcardFilter } from '../../../../kibana_utils/common';
@@ -73,6 +73,7 @@ import type {
SearchSourceFields,
} from './types';
import { FetchHandlers, RequestFailure, getSearchParamsFromRequest, SearchRequest } from './fetch';
+import { getRequestInspectorStats, getResponseInspectorStats } from './inspect';
import { getEsQueryConfig, buildEsQuery, Filter, UI_SETTINGS } from '../../../common';
import { getHighlightRequest } from '../../../common/field_formats';
@@ -256,6 +257,9 @@ export class SearchSource {
fetch$(options: ISearchOptions = {}) {
const { getConfig } = this.dependencies;
return defer(() => this.requestIsStarting(options)).pipe(
+ tap(() => {
+ options.requestResponder?.stats(getRequestInspectorStats(this));
+ }),
switchMap(() => {
const searchRequest = this.flatten();
this.history = [searchRequest];
@@ -271,7 +275,17 @@ export class SearchSource {
// TODO: Remove casting when https://github.com/elastic/elasticsearch-js/issues/1287 is resolved
if ((response as any).error) {
throw new RequestFailure(null, response);
+ } else {
+ options.requestResponder?.stats(getResponseInspectorStats(response, this));
+ options.requestResponder?.ok({ json: response });
}
+ }),
+ catchError((e) => {
+ options.requestResponder?.error({ json: e });
+ throw e;
+ }),
+ finalize(() => {
+ options.requestResponder?.json(this.getSearchRequestBody());
})
);
}
@@ -298,9 +312,8 @@ export class SearchSource {
/**
* Returns body contents of the search request, often referred as query DSL.
*/
- async getSearchRequestBody() {
- const searchRequest = await this.flatten();
- return searchRequest.body;
+ getSearchRequestBody() {
+ return this.flatten().body;
}
/**
diff --git a/src/plugins/data/common/search/types.ts b/src/plugins/data/common/search/types.ts
index d77a2ea62bb9a..37de8dc49d3c6 100644
--- a/src/plugins/data/common/search/types.ts
+++ b/src/plugins/data/common/search/types.ts
@@ -9,6 +9,7 @@
import { Observable } from 'rxjs';
import { IEsSearchRequest, IEsSearchResponse } from './es_search';
import { IndexPattern } from '..';
+import type { RequestResponder } from '../../../inspector/common';
export type ISearchGeneric = <
SearchStrategyRequest extends IKibanaSearchRequest = IEsSearchRequest,
@@ -118,6 +119,8 @@ export interface ISearchOptions {
*/
indexPattern?: IndexPattern;
+
+ requestResponder?: RequestResponder;
}
/**
diff --git a/src/plugins/data/public/index.ts b/src/plugins/data/public/index.ts
index c47cd6cd9740d..e86b64d135d59 100644
--- a/src/plugins/data/public/index.ts
+++ b/src/plugins/data/public/index.ts
@@ -314,8 +314,6 @@ import {
boundsDescendingRaw,
getNumberHistogramIntervalByDatatableColumn,
getDateHistogramMetaDataByDatatableColumn,
- // expressions utils
- getRequestInspectorStats,
getResponseInspectorStats,
// tabify
tabifyAggResponse,
@@ -388,6 +386,8 @@ export {
PainlessError,
noSearchSessionStorageCapabilityMessage,
SEARCH_SESSIONS_MANAGEMENT_ID,
+ waitUntilNextSessionCompletes$,
+ WaitUntilNextSessionCompletesOptions,
} from './search';
export type {
@@ -426,7 +426,6 @@ export const search = {
getNumberHistogramIntervalByDatatableColumn,
getDateHistogramMetaDataByDatatableColumn,
},
- getRequestInspectorStats,
getResponseInspectorStats,
tabifyAggResponse,
tabifyGetColumns,
@@ -467,6 +466,7 @@ export {
TimeHistoryContract,
QueryStateChange,
QueryStart,
+ AutoRefreshDoneFn,
} from './query';
export { AggsStart } from './search/aggs';
diff --git a/src/plugins/data/public/public.api.md b/src/plugins/data/public/public.api.md
index ec24a9296674d..c4e54c64af132 100644
--- a/src/plugins/data/public/public.api.md
+++ b/src/plugins/data/public/public.api.md
@@ -504,6 +504,11 @@ export interface ApplyGlobalFilterActionContext {
// @public (undocumented)
export type AutocompleteStart = ReturnType;
+// Warning: (ae-missing-release-tag) "AutoRefreshDoneFn" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
+//
+// @public (undocumented)
+export type AutoRefreshDoneFn = () => void;
+
// Warning: (ae-forgotten-export) The symbol "DateFormat" needs to be exported by the entry point index.d.ts
// Warning: (ae-forgotten-export) The symbol "DateNanosFormat" needs to be exported by the entry point index.d.ts
// Warning: (ae-missing-release-tag) "baseFormattersPublic" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
@@ -1555,7 +1560,6 @@ export type IndexPatternSelectProps = Required, 'isLo
indexPatternId: string;
fieldTypes?: string[];
onNoIndexPatterns?: () => void;
- maxIndexPatterns?: number;
};
// Warning: (ae-missing-release-tag) "IndexPatternSpec" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
@@ -1672,6 +1676,10 @@ export interface ISearchOptions {
isRestore?: boolean;
isStored?: boolean;
legacyHitsTotal?: boolean;
+ // Warning: (ae-forgotten-export) The symbol "RequestResponder" needs to be exported by the entry point index.d.ts
+ //
+ // (undocumented)
+ requestResponder?: RequestResponder;
sessionId?: string;
strategy?: string;
}
@@ -1781,6 +1789,8 @@ export enum KBN_FIELD_TYPES {
// (undocumented)
IP_RANGE = "ip_range",
// (undocumented)
+ MISSING = "missing",
+ // (undocumented)
MURMUR3 = "murmur3",
// (undocumented)
NESTED = "nested",
@@ -2292,7 +2302,6 @@ export const search: {
timeRange: import("../common").TimeRange | undefined;
} | undefined;
};
- getRequestInspectorStats: typeof getRequestInspectorStats;
getResponseInspectorStats: typeof getResponseInspectorStats;
tabifyAggResponse: typeof tabifyAggResponse;
tabifyGetColumns: typeof tabifyGetColumns;
@@ -2428,7 +2437,7 @@ export class SearchSource {
getId(): string;
getOwnField(field: K): SearchSourceFields[K];
getParent(): SearchSource | undefined;
- getSearchRequestBody(): Promise;
+ getSearchRequestBody(): any;
getSerializedFields(recurse?: boolean): SearchSourceFields;
// Warning: (ae-incompatible-release-tags) The symbol "history" is marked as @public, but its signature references "SearchRequest" which is marked as @internal
//
@@ -2647,6 +2656,18 @@ export const UI_SETTINGS: {
readonly AUTOCOMPLETE_USE_TIMERANGE: "autocomplete:useTimeRange";
};
+// Warning: (ae-missing-release-tag) "waitUntilNextSessionCompletes$" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
+//
+// @public
+export function waitUntilNextSessionCompletes$(sessionService: ISessionService, { waitForIdle }?: WaitUntilNextSessionCompletesOptions): import("rxjs").Observable;
+
+// Warning: (ae-missing-release-tag) "WaitUntilNextSessionCompletesOptions" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
+//
+// @public
+export interface WaitUntilNextSessionCompletesOptions {
+ waitForIdle?: number;
+}
+
// Warnings were encountered during analysis:
//
@@ -2694,7 +2715,6 @@ export const UI_SETTINGS: {
// src/plugins/data/public/index.ts:238:27 - (ae-forgotten-export) The symbol "validateIndexPattern" needs to be exported by the entry point index.d.ts
// src/plugins/data/public/index.ts:238:27 - (ae-forgotten-export) The symbol "flattenHitWrapper" needs to be exported by the entry point index.d.ts
// src/plugins/data/public/index.ts:238:27 - (ae-forgotten-export) The symbol "formatHitProvider" needs to be exported by the entry point index.d.ts
-// src/plugins/data/public/index.ts:404:20 - (ae-forgotten-export) The symbol "getRequestInspectorStats" needs to be exported by the entry point index.d.ts
// src/plugins/data/public/index.ts:404:20 - (ae-forgotten-export) The symbol "getResponseInspectorStats" needs to be exported by the entry point index.d.ts
// src/plugins/data/public/index.ts:404:20 - (ae-forgotten-export) The symbol "tabifyAggResponse" needs to be exported by the entry point index.d.ts
// src/plugins/data/public/index.ts:404:20 - (ae-forgotten-export) The symbol "tabifyGetColumns" needs to be exported by the entry point index.d.ts
diff --git a/src/plugins/data/public/query/timefilter/index.ts b/src/plugins/data/public/query/timefilter/index.ts
index 83e897824d86c..3dfd4e0fe514f 100644
--- a/src/plugins/data/public/query/timefilter/index.ts
+++ b/src/plugins/data/public/query/timefilter/index.ts
@@ -9,7 +9,7 @@
export { TimefilterService, TimefilterSetup } from './timefilter_service';
export * from './types';
-export { Timefilter, TimefilterContract } from './timefilter';
+export { Timefilter, TimefilterContract, AutoRefreshDoneFn } from './timefilter';
export { TimeHistory, TimeHistoryContract } from './time_history';
export { changeTimeFilter, convertRangeFilterToTimeRangeString } from './lib/change_time_filter';
export { extractTimeFilter, extractTimeRange } from './lib/extract_time_filter';
diff --git a/src/plugins/data/public/query/timefilter/lib/auto_refresh_loop.test.ts b/src/plugins/data/public/query/timefilter/lib/auto_refresh_loop.test.ts
new file mode 100644
index 0000000000000..3c8b316c3b878
--- /dev/null
+++ b/src/plugins/data/public/query/timefilter/lib/auto_refresh_loop.test.ts
@@ -0,0 +1,205 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { createAutoRefreshLoop, AutoRefreshDoneFn } from './auto_refresh_loop';
+
+jest.useFakeTimers();
+
+test('triggers refresh with interval', () => {
+ const { loop$, start, stop } = createAutoRefreshLoop();
+
+ const fn = jest.fn((done) => done());
+ loop$.subscribe(fn);
+
+ jest.advanceTimersByTime(5000);
+ expect(fn).not.toBeCalled();
+
+ start(1000);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn).toHaveBeenCalledTimes(1);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn).toHaveBeenCalledTimes(2);
+
+ stop();
+
+ jest.advanceTimersByTime(5000);
+ expect(fn).toHaveBeenCalledTimes(2);
+});
+
+test('waits for done() to be called', () => {
+ const { loop$, start } = createAutoRefreshLoop();
+
+ let done!: AutoRefreshDoneFn;
+ const fn = jest.fn((_done) => {
+ done = _done;
+ });
+ loop$.subscribe(fn);
+ start(1000);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn).toHaveBeenCalledTimes(1);
+ expect(done).toBeInstanceOf(Function);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn).toHaveBeenCalledTimes(1);
+
+ done();
+
+ jest.advanceTimersByTime(500);
+ expect(fn).toHaveBeenCalledTimes(1);
+ jest.advanceTimersByTime(501);
+ expect(fn).toHaveBeenCalledTimes(2);
+});
+
+test('waits for done() from multiple subscribers to be called', () => {
+ const { loop$, start } = createAutoRefreshLoop();
+
+ let done1!: AutoRefreshDoneFn;
+ const fn1 = jest.fn((_done) => {
+ done1 = _done;
+ });
+ loop$.subscribe(fn1);
+
+ let done2!: AutoRefreshDoneFn;
+ const fn2 = jest.fn((_done) => {
+ done2 = _done;
+ });
+ loop$.subscribe(fn2);
+
+ start(1000);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ expect(done1).toBeInstanceOf(Function);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn1).toHaveBeenCalledTimes(1);
+
+ done1();
+
+ jest.advanceTimersByTime(500);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ jest.advanceTimersByTime(501);
+ expect(fn1).toHaveBeenCalledTimes(1);
+
+ done2();
+
+ jest.advanceTimersByTime(500);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ jest.advanceTimersByTime(501);
+ expect(fn1).toHaveBeenCalledTimes(2);
+});
+
+test('unsubscribe() resets the state', () => {
+ const { loop$, start } = createAutoRefreshLoop();
+
+ let done1!: AutoRefreshDoneFn;
+ const fn1 = jest.fn((_done) => {
+ done1 = _done;
+ });
+ loop$.subscribe(fn1);
+
+ const fn2 = jest.fn();
+ const sub2 = loop$.subscribe(fn2);
+
+ start(1000);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ expect(done1).toBeInstanceOf(Function);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn1).toHaveBeenCalledTimes(1);
+
+ done1();
+
+ jest.advanceTimersByTime(500);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ jest.advanceTimersByTime(501);
+ expect(fn1).toHaveBeenCalledTimes(1);
+
+ sub2.unsubscribe();
+
+ jest.advanceTimersByTime(500);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ jest.advanceTimersByTime(501);
+ expect(fn1).toHaveBeenCalledTimes(2);
+});
+
+test('calling done() twice is ignored', () => {
+ const { loop$, start } = createAutoRefreshLoop();
+
+ let done1!: AutoRefreshDoneFn;
+ const fn1 = jest.fn((_done) => {
+ done1 = _done;
+ });
+ loop$.subscribe(fn1);
+
+ const fn2 = jest.fn();
+ loop$.subscribe(fn2);
+
+ start(1000);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ expect(done1).toBeInstanceOf(Function);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn1).toHaveBeenCalledTimes(1);
+
+ done1();
+
+ jest.advanceTimersByTime(500);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ jest.advanceTimersByTime(501);
+ expect(fn1).toHaveBeenCalledTimes(1);
+
+ done1();
+
+ jest.advanceTimersByTime(500);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ jest.advanceTimersByTime(501);
+ expect(fn1).toHaveBeenCalledTimes(1);
+});
+
+test('calling older done() is ignored', () => {
+ const { loop$, start } = createAutoRefreshLoop();
+
+ let done1!: AutoRefreshDoneFn;
+ const fn1 = jest.fn((_done) => {
+ // @ts-ignore
+ if (done1) return;
+ done1 = _done;
+ });
+ loop$.subscribe(fn1);
+
+ start(1000);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ expect(done1).toBeInstanceOf(Function);
+
+ jest.advanceTimersByTime(1001);
+ expect(fn1).toHaveBeenCalledTimes(1);
+
+ done1();
+
+ jest.advanceTimersByTime(500);
+ expect(fn1).toHaveBeenCalledTimes(1);
+ jest.advanceTimersByTime(501);
+ expect(fn1).toHaveBeenCalledTimes(2);
+
+ done1();
+
+ jest.advanceTimersByTime(500);
+ expect(fn1).toHaveBeenCalledTimes(2);
+ jest.advanceTimersByTime(501);
+ expect(fn1).toHaveBeenCalledTimes(2);
+});
diff --git a/src/plugins/data/public/query/timefilter/lib/auto_refresh_loop.ts b/src/plugins/data/public/query/timefilter/lib/auto_refresh_loop.ts
new file mode 100644
index 0000000000000..1e213b36e1d8b
--- /dev/null
+++ b/src/plugins/data/public/query/timefilter/lib/auto_refresh_loop.ts
@@ -0,0 +1,80 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { defer, Subject } from 'rxjs';
+import { finalize, map } from 'rxjs/operators';
+import { once } from 'lodash';
+
+export type AutoRefreshDoneFn = () => void;
+
+/**
+ * Creates a loop for timepicker's auto refresh
+ * It has a "confirmation" mechanism:
+ * When auto refresh loop emits, it won't continue automatically,
+ * until each subscriber calls received `done` function.
+ *
+ * @internal
+ */
+export const createAutoRefreshLoop = () => {
+ let subscribersCount = 0;
+ const tick = new Subject();
+
+ let _timeoutHandle: number;
+ let _timeout: number = 0;
+
+ function start() {
+ stop();
+ if (_timeout === 0) return;
+ const timeoutHandle = window.setTimeout(() => {
+ let pendingDoneCount = subscribersCount;
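+      // the next tick is scheduled only after every current subscriber has called done();
+      // a done() coming from an outdated tick (stale timeoutHandle) is ignored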
+ const done = () => {
+ if (timeoutHandle !== _timeoutHandle) return;
+
+ pendingDoneCount--;
+ if (pendingDoneCount === 0) {
+ start();
+ }
+ };
+ tick.next(done);
+ }, _timeout);
+
+ _timeoutHandle = timeoutHandle;
+ }
+
+ function stop() {
+ window.clearTimeout(_timeoutHandle);
+ _timeoutHandle = -1;
+ }
+
+ return {
+ stop: () => {
+ _timeout = 0;
+ stop();
+ },
+ start: (timeout: number) => {
+ _timeout = timeout;
+ if (subscribersCount > 0) {
+ start();
+ }
+ },
+ loop$: defer(() => {
+ subscribersCount++;
+ start(); // restart the loop on a new subscriber
+ return tick.pipe(map((doneCb) => once(doneCb))); // each subscriber allowed to call done only once
+ }).pipe(
+ finalize(() => {
+ subscribersCount--;
+ if (subscribersCount === 0) {
+ stop();
+ } else {
+ start(); // restart the loop to potentially unblock the interval
+ }
+ })
+ ),
+ };
+};
diff --git a/src/plugins/data/public/query/timefilter/timefilter.test.ts b/src/plugins/data/public/query/timefilter/timefilter.test.ts
index 8e1e76ed19e6d..92ee6b0c30428 100644
--- a/src/plugins/data/public/query/timefilter/timefilter.test.ts
+++ b/src/plugins/data/public/query/timefilter/timefilter.test.ts
@@ -10,7 +10,7 @@ jest.useFakeTimers();
import sinon from 'sinon';
import moment from 'moment';
-import { Timefilter } from './timefilter';
+import { AutoRefreshDoneFn, Timefilter } from './timefilter';
import { Subscription } from 'rxjs';
import { TimeRange, RefreshInterval } from '../../../common';
import { createNowProviderMock } from '../../now_provider/mocks';
@@ -121,7 +121,7 @@ describe('setRefreshInterval', () => {
beforeEach(() => {
update = sinon.spy();
fetch = sinon.spy();
- autoRefreshFetch = sinon.spy();
+ autoRefreshFetch = sinon.spy((done) => done());
timefilter.setRefreshInterval({
pause: false,
value: 0,
@@ -344,3 +344,44 @@ describe('calculateBounds', () => {
expect(() => timefilter.calculateBounds(timeRange)).toThrowError();
});
});
+
+describe('getAutoRefreshFetch$', () => {
+ test('next auto refresh loop starts after "done" called', () => {
+ const autoRefreshFetch = jest.fn();
+ let doneCb: AutoRefreshDoneFn | undefined;
+ timefilter.getAutoRefreshFetch$().subscribe((done) => {
+ autoRefreshFetch();
+ doneCb = done;
+ });
+ timefilter.setRefreshInterval({ pause: false, value: 1000 });
+
+ expect(autoRefreshFetch).toBeCalledTimes(0);
+ jest.advanceTimersByTime(5000);
+ expect(autoRefreshFetch).toBeCalledTimes(1);
+
+ if (doneCb) doneCb();
+
+ jest.advanceTimersByTime(1005);
+ expect(autoRefreshFetch).toBeCalledTimes(2);
+ });
+
+ test('new getAutoRefreshFetch$ subscription restarts refresh loop', () => {
+ const autoRefreshFetch = jest.fn();
+ const fetch$ = timefilter.getAutoRefreshFetch$();
+ const sub1 = fetch$.subscribe((done) => {
+ autoRefreshFetch();
+      // this done will never be called, but the loop will be reset by another subscription
+ });
+ timefilter.setRefreshInterval({ pause: false, value: 1000 });
+
+ expect(autoRefreshFetch).toBeCalledTimes(0);
+ jest.advanceTimersByTime(5000);
+ expect(autoRefreshFetch).toBeCalledTimes(1);
+
+ fetch$.subscribe(autoRefreshFetch);
+ expect(autoRefreshFetch).toBeCalledTimes(1);
+ sub1.unsubscribe();
+ jest.advanceTimersByTime(1005);
+ expect(autoRefreshFetch).toBeCalledTimes(2);
+ });
+});
diff --git a/src/plugins/data/public/query/timefilter/timefilter.ts b/src/plugins/data/public/query/timefilter/timefilter.ts
index 436b18f70a2f8..9894010601d2b 100644
--- a/src/plugins/data/public/query/timefilter/timefilter.ts
+++ b/src/plugins/data/public/query/timefilter/timefilter.ts
@@ -22,6 +22,9 @@ import {
TimeRange,
} from '../../../common';
import { TimeHistoryContract } from './time_history';
+import { createAutoRefreshLoop, AutoRefreshDoneFn } from './lib/auto_refresh_loop';
+
+export { AutoRefreshDoneFn };
// TODO: remove!
@@ -32,8 +35,6 @@ export class Timefilter {
private timeUpdate$ = new Subject();
// Fired when a user changes the the autorefresh settings
private refreshIntervalUpdate$ = new Subject();
- // Used when an auto refresh is triggered
- private autoRefreshFetch$ = new Subject();
private fetch$ = new Subject();
private _time: TimeRange;
@@ -45,11 +46,12 @@ export class Timefilter {
private _isTimeRangeSelectorEnabled: boolean = false;
private _isAutoRefreshSelectorEnabled: boolean = false;
- private _autoRefreshIntervalId: number = 0;
-
private readonly timeDefaults: TimeRange;
private readonly refreshIntervalDefaults: RefreshInterval;
+ // Used when an auto refresh is triggered
+ private readonly autoRefreshLoop = createAutoRefreshLoop();
+
constructor(
config: TimefilterConfig,
timeHistory: TimeHistoryContract,
@@ -86,9 +88,13 @@ export class Timefilter {
return this.refreshIntervalUpdate$.asObservable();
};
- public getAutoRefreshFetch$ = () => {
- return this.autoRefreshFetch$.asObservable();
- };
+ /**
+   * Get an observable that emits when it is time to refetch data due to the refresh interval.
+   * Each subscription to this observable resets the internal interval.
+   * The emitted value is a callback {@link AutoRefreshDoneFn} that must be called to restart the refresh interval loop.
+   * Apps should use this callback to start the next auto refresh loop when the view has finished updating.
+ */
+ public getAutoRefreshFetch$ = () => this.autoRefreshLoop.loop$;
public getFetch$ = () => {
return this.fetch$.asObservable();
@@ -166,13 +172,9 @@ export class Timefilter {
}
}
- // Clear the previous auto refresh interval and start a new one (if not paused)
- clearInterval(this._autoRefreshIntervalId);
- if (!newRefreshInterval.pause) {
- this._autoRefreshIntervalId = window.setInterval(
- () => this.autoRefreshFetch$.next(),
- newRefreshInterval.value
- );
+ this.autoRefreshLoop.stop();
+ if (!newRefreshInterval.pause && newRefreshInterval.value !== 0) {
+ this.autoRefreshLoop.start(newRefreshInterval.value);
}
};
diff --git a/src/plugins/data/public/query/timefilter/timefilter_service.mock.ts b/src/plugins/data/public/query/timefilter/timefilter_service.mock.ts
index 0f2b01f618186..c22f62f45a709 100644
--- a/src/plugins/data/public/query/timefilter/timefilter_service.mock.ts
+++ b/src/plugins/data/public/query/timefilter/timefilter_service.mock.ts
@@ -20,7 +20,7 @@ const createSetupContractMock = () => {
getEnabledUpdated$: jest.fn(),
getTimeUpdate$: jest.fn(),
getRefreshIntervalUpdate$: jest.fn(),
- getAutoRefreshFetch$: jest.fn(() => new Observable()),
+ getAutoRefreshFetch$: jest.fn(() => new Observable<() => void>()),
getFetch$: jest.fn(),
getTime: jest.fn(),
setTime: jest.fn(),
diff --git a/src/plugins/data/public/search/index.ts b/src/plugins/data/public/search/index.ts
index fded4c46992c0..92a5c36202e6f 100644
--- a/src/plugins/data/public/search/index.ts
+++ b/src/plugins/data/public/search/index.ts
@@ -45,6 +45,8 @@ export {
ISessionsClient,
noSearchSessionStorageCapabilityMessage,
SEARCH_SESSIONS_MANAGEMENT_ID,
+ waitUntilNextSessionCompletes$,
+ WaitUntilNextSessionCompletesOptions,
} from './session';
export { getEsPreference } from './es_search';
diff --git a/src/plugins/data/public/search/session/index.ts b/src/plugins/data/public/search/session/index.ts
index 15410400a33e6..ce578378a2fe8 100644
--- a/src/plugins/data/public/search/session/index.ts
+++ b/src/plugins/data/public/search/session/index.ts
@@ -11,3 +11,7 @@ export { SearchSessionState } from './search_session_state';
export { SessionsClient, ISessionsClient } from './sessions_client';
export { noSearchSessionStorageCapabilityMessage } from './i18n';
export { SEARCH_SESSIONS_MANAGEMENT_ID } from './constants';
+export {
+ waitUntilNextSessionCompletes$,
+ WaitUntilNextSessionCompletesOptions,
+} from './session_helpers';
diff --git a/src/plugins/data/public/search/session/session_helpers.test.ts b/src/plugins/data/public/search/session/session_helpers.test.ts
new file mode 100644
index 0000000000000..5b64e7b554d18
--- /dev/null
+++ b/src/plugins/data/public/search/session/session_helpers.test.ts
@@ -0,0 +1,88 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { waitUntilNextSessionCompletes$ } from './session_helpers';
+import { ISessionService, SessionService } from './session_service';
+import { BehaviorSubject } from 'rxjs';
+import { SearchSessionState } from './search_session_state';
+import { NowProviderInternalContract } from '../../now_provider';
+import { coreMock } from '../../../../../core/public/mocks';
+import { createNowProviderMock } from '../../now_provider/mocks';
+import { SEARCH_SESSIONS_MANAGEMENT_ID } from './constants';
+import { getSessionsClientMock } from './mocks';
+
+let sessionService: ISessionService;
+let state$: BehaviorSubject;
+let nowProvider: jest.Mocked;
+let currentAppId$: BehaviorSubject;
+
+beforeEach(() => {
+ const initializerContext = coreMock.createPluginInitializerContext();
+ const startService = coreMock.createSetup().getStartServices;
+ nowProvider = createNowProviderMock();
+ currentAppId$ = new BehaviorSubject('app');
+ sessionService = new SessionService(
+ initializerContext,
+ () =>
+ startService().then(([coreStart, ...rest]) => [
+ {
+ ...coreStart,
+ application: {
+ ...coreStart.application,
+ currentAppId$,
+ capabilities: {
+ ...coreStart.application.capabilities,
+ management: {
+ kibana: {
+ [SEARCH_SESSIONS_MANAGEMENT_ID]: true,
+ },
+ },
+ },
+ },
+ },
+ ...rest,
+ ]),
+ getSessionsClientMock(),
+ nowProvider,
+ { freezeState: false } // needed to use mocks inside state container
+ );
+ state$ = new BehaviorSubject(SearchSessionState.None);
+ sessionService.state$.subscribe(state$);
+});
+
+describe('waitUntilNextSessionCompletes$', () => {
+ beforeEach(() => {
+ jest.useFakeTimers();
+ });
+ afterEach(() => {
+ jest.useRealTimers();
+ });
+ test('emits when next session starts', () => {
+ sessionService.start();
+ let untrackSearch = sessionService.trackSearch({ abort: () => {} });
+ untrackSearch();
+
+ const next = jest.fn();
+ const complete = jest.fn();
+ waitUntilNextSessionCompletes$(sessionService).subscribe({ next, complete });
+ expect(next).not.toBeCalled();
+
+ sessionService.start();
+ expect(next).not.toBeCalled();
+
+ untrackSearch = sessionService.trackSearch({ abort: () => {} });
+ untrackSearch();
+
+ expect(next).not.toBeCalled();
+ jest.advanceTimersByTime(500);
+ expect(next).not.toBeCalled();
+ jest.advanceTimersByTime(1000);
+ expect(next).toBeCalledTimes(1);
+ expect(complete).toBeCalled();
+ });
+});
diff --git a/src/plugins/data/public/search/session/session_helpers.ts b/src/plugins/data/public/search/session/session_helpers.ts
new file mode 100644
index 0000000000000..1f0a2da7e93f4
--- /dev/null
+++ b/src/plugins/data/public/search/session/session_helpers.ts
@@ -0,0 +1,48 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { debounceTime, first, skipUntil } from 'rxjs/operators';
+import { ISessionService } from './session_service';
+import { SearchSessionState } from './search_session_state';
+
+/**
+ * Options for {@link waitUntilNextSessionCompletes$}
+ */
+export interface WaitUntilNextSessionCompletesOptions {
+ /**
+   * How long to wait between session state transitions before considering the session completed
+ */
+ waitForIdle?: number;
+}
+
+/**
+ * Creates an observable that emits when the next search session completes.
+ * This utility is helpful for delaying application tasks until the next session completes.
+ *
+ * @param sessionService - {@link ISessionService}
+ * @param opts - {@link WaitUntilNextSessionCompletesOptions}
+ */
+export function waitUntilNextSessionCompletes$(
+ sessionService: ISessionService,
+ { waitForIdle = 1000 }: WaitUntilNextSessionCompletesOptions = { waitForIdle: 1000 }
+) {
+ return sessionService.state$.pipe(
+ // wait until new session starts
+ skipUntil(sessionService.state$.pipe(first((state) => state === SearchSessionState.None))),
+ // wait until new session starts loading
+ skipUntil(sessionService.state$.pipe(first((state) => state === SearchSessionState.Loading))),
+    // debounce to ignore quick switches between loading <-> completed
+    // that can happen between sequential search requests inside a single session
+ debounceTime(waitForIdle),
+ // then wait until it finishes
+ first(
+ (state) =>
+ state === SearchSessionState.Completed || state === SearchSessionState.BackgroundCompleted
+ )
+ );
+}
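A minimal usage sketch of the helper added above (the consumer function and `refreshAfterSession` callback are hypothetical; `sessionService` is assumed to be an `ISessionService`, as in the test file):

```ts
import { ISessionService } from './session_service';
import { waitUntilNextSessionCompletes$ } from './session_helpers';

// Defer a follow-up task until the next search session has completed and stayed
// idle for `waitForIdle` milliseconds (hypothetical consumer, not part of this PR).
function runAfterNextSession(sessionService: ISessionService, refreshAfterSession: () => void) {
  waitUntilNextSessionCompletes$(sessionService, { waitForIdle: 1000 }).subscribe({
    next: () => refreshAfterSession(),
  });
}
```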
diff --git a/src/plugins/data/public/ui/index_pattern_select/index_pattern_select.tsx b/src/plugins/data/public/ui/index_pattern_select/index_pattern_select.tsx
index aa36323d11bcc..04bdb7a690268 100644
--- a/src/plugins/data/public/ui/index_pattern_select/index_pattern_select.tsx
+++ b/src/plugins/data/public/ui/index_pattern_select/index_pattern_select.tsx
@@ -25,7 +25,6 @@ export type IndexPatternSelectProps = Required<
indexPatternId: string;
fieldTypes?: string[];
onNoIndexPatterns?: () => void;
- maxIndexPatterns?: number;
};
export type IndexPatternSelectInternalProps = IndexPatternSelectProps & {
@@ -42,10 +41,6 @@ interface IndexPatternSelectState {
// Needed for React.lazy
// eslint-disable-next-line import/no-default-export
export default class IndexPatternSelect extends Component {
- static defaultProps: {
- maxIndexPatterns: 1000;
- };
-
private isMounted: boolean = false;
state: IndexPatternSelectState;
@@ -67,7 +62,7 @@ export default class IndexPatternSelect extends Component {
- const { fieldTypes, onNoIndexPatterns, indexPatternService } = this.props;
- const indexPatterns = await indexPatternService.find(
- `${searchValue}*`,
- this.props.maxIndexPatterns
- );
+ const isCurrentSearch = () => {
+ return this.isMounted && searchValue === this.state.searchValue;
+ };
- // We need this check to handle the case where search results come back in a different
- // order than they were sent out. Only load results for the most recent search.
- if (searchValue !== this.state.searchValue || !this.isMounted) {
+ const idsAndTitles = await this.props.indexPatternService.getIdsWithTitle();
+ if (!isCurrentSearch()) {
return;
}
- const options = indexPatterns
- .filter((indexPattern) => {
- return fieldTypes
- ? indexPattern.fields.some((field) => {
- return fieldTypes.includes(field.type);
- })
- : true;
- })
- .map((indexPattern) => {
- return {
- label: indexPattern.title,
- value: indexPattern.id,
- };
+ const options = [];
+ for (let i = 0; i < idsAndTitles.length; i++) {
+ if (!idsAndTitles[i].title.toLowerCase().includes(searchValue.toLowerCase())) {
+ // index pattern excluded due to title not matching search
+ continue;
+ }
+
+ if (this.props.fieldTypes) {
+ try {
+ const indexPattern = await this.props.indexPatternService.get(idsAndTitles[i].id);
+ if (!isCurrentSearch()) {
+ return;
+ }
+ const hasRequiredFieldTypes = indexPattern.fields.some((field) => {
+ return this.props.fieldTypes!.includes(field.type);
+ });
+ if (!hasRequiredFieldTypes) {
+ continue;
+ }
+ } catch (err) {
+ // could not load index pattern, exclude it from list.
+ continue;
+ }
+ }
+
+ options.push({
+ label: idsAndTitles[i].title,
+ value: idsAndTitles[i].id,
});
+
+      // Loading each index pattern object requires a network call, so only collect a small number of matching index patterns.
+      // Users can refine 'searchValue' to narrow the list and locate their index pattern.
+ if (options.length > 15) {
+ break;
+ }
+ }
+
this.setState({
isLoading: false,
options,
});
- if (onNoIndexPatterns && searchValue === '' && options.length === 0) {
- onNoIndexPatterns();
+ if (this.props.onNoIndexPatterns && searchValue === '' && options.length === 0) {
+ this.props.onNoIndexPatterns();
}
}, 300);
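The rewrite above replaces the single `find()` call with `getIdsWithTitle()` plus an `isCurrentSearch()` guard so that responses arriving out of order are dropped. A stripped-down sketch of that guard pattern, with hypothetical names standing in for the component's real state and service:

```ts
// Sketch only: apply results solely when they belong to the latest search value.
declare function fetchIdsAndTitles(): Promise<Array<{ id: string; title: string }>>;

class OptionsLoader {
  private isMounted = true;
  private state = { searchValue: '', options: [] as Array<{ label: string; value: string }> };

  async loadOptions(searchValue: string) {
    this.state.searchValue = searchValue;
    const isCurrentSearch = () => this.isMounted && searchValue === this.state.searchValue;

    const idsAndTitles = await fetchIdsAndTitles();
    if (!isCurrentSearch()) {
      return; // a newer search started while this request was in flight
    }
    this.state.options = idsAndTitles
      .filter(({ title }) => title.toLowerCase().includes(searchValue.toLowerCase()))
      .map(({ id, title }) => ({ label: title, value: id }));
  }
}
```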
diff --git a/src/plugins/data/public/ui/query_string_input/_query_bar.scss b/src/plugins/data/public/ui/query_string_input/_query_bar.scss
index 466cc8c3de0b7..4e12f11668734 100644
--- a/src/plugins/data/public/ui/query_string_input/_query_bar.scss
+++ b/src/plugins/data/public/ui/query_string_input/_query_bar.scss
@@ -17,6 +17,16 @@
@include kbnThemeStyle('v8') {
background-color: $euiFormBackgroundColor;
+ border-radius: $euiFormControlBorderRadius;
+
+ &.kbnQueryBar__textareaWrap--hasPrepend {
+ border-top-left-radius: 0;
+ border-bottom-left-radius: 0;
+ }
+ &.kbnQueryBar__textareaWrap--hasAppend {
+ border-top-right-radius: 0;
+ border-bottom-right-radius: 0;
+ }
}
}
@@ -35,8 +45,16 @@
}
@include kbnThemeStyle('v8') {
- border-radius: 0;
padding-bottom: $euiSizeS + 1px;
+
+ &.kbnQueryBar__textarea--hasPrepend {
+ border-top-left-radius: 0;
+ border-bottom-left-radius: 0;
+ }
+ &.kbnQueryBar__textarea--hasAppend {
+ border-top-right-radius: 0;
+ border-bottom-right-radius: 0;
+ }
}
&:not(.kbnQueryBar__textarea--autoHeight):not(:invalid) {
diff --git a/src/plugins/data/public/ui/query_string_input/query_string_input.tsx b/src/plugins/data/public/ui/query_string_input/query_string_input.tsx
index 900a4ab7d7eb7..0f660f87266fd 100644
--- a/src/plugins/data/public/ui/query_string_input/query_string_input.tsx
+++ b/src/plugins/data/public/ui/query_string_input/query_string_input.tsx
@@ -682,7 +682,14 @@ export default class QueryStringInputUI extends Component {
);
const inputClassName = classNames(
'kbnQueryBar__textarea',
- this.props.iconType ? 'kbnQueryBar__textarea--withIcon' : null
+ this.props.iconType ? 'kbnQueryBar__textarea--withIcon' : null,
+ this.props.prepend ? 'kbnQueryBar__textarea--hasPrepend' : null,
+ !this.props.disableLanguageSwitcher ? 'kbnQueryBar__textarea--hasAppend' : null
+ );
+ const inputWrapClassName = classNames(
+ 'euiFormControlLayout__childrenWrapper kbnQueryBar__textareaWrap',
+ this.props.prepend ? 'kbnQueryBar__textareaWrap--hasPrepend' : null,
+ !this.props.disableLanguageSwitcher ? 'kbnQueryBar__textareaWrap--hasAppend' : null
);
return (
@@ -711,7 +718,7 @@ export default class QueryStringInputUI extends Component {
>
> {
},
[UI_SETTINGS.HISTOGRAM_BAR_TARGET]: {
name: i18n.translate('data.advancedSettings.histogram.barTargetTitle', {
- defaultMessage: 'Target bars',
+ defaultMessage: 'Target buckets',
}),
value: 50,
description: i18n.translate('data.advancedSettings.histogram.barTargetText', {
defaultMessage:
- 'Attempt to generate around this many bars when using "auto" interval in date histograms',
+ 'Attempt to generate around this many buckets when using "auto" interval in date and numeric histograms',
}),
schema: schema.number(),
},
[UI_SETTINGS.HISTOGRAM_MAX_BARS]: {
name: i18n.translate('data.advancedSettings.histogram.maxBarsTitle', {
- defaultMessage: 'Maximum bars',
+ defaultMessage: 'Maximum buckets',
}),
value: 100,
description: i18n.translate('data.advancedSettings.histogram.maxBarsText', {
- defaultMessage:
- 'Never show more than this many bars in date histograms, scale values if needed',
+ defaultMessage: `
+ Limits the density of date and number histograms across Kibana
+        for better performance using a test query. If the test query would create too many buckets,
+ the interval between buckets will be increased. This setting applies separately
+ to each histogram aggregation, and does not apply to other types of aggregation.
+ To find the maximum value of this setting, divide the Elasticsearch 'search.max_buckets'
+ value by the maximum number of aggregations in each visualization.
+ `,
}),
schema: schema.number(),
},
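A quick worked example of the sizing guidance in the new description (the figures are illustrative assumptions, not defaults): with `search.max_buckets` at 65,536 in Elasticsearch and visualizations that use at most two histogram aggregations each, `histogram:maxBars` should stay at or below 65,536 / 2 = 32,768.

```ts
// Illustrative calculation only; both inputs are assumptions for the example.
const searchMaxBuckets = 65536; // Elasticsearch 'search.max_buckets'
const maxAggsPerVisualization = 2; // most histogram aggregations in any one visualization
const histogramMaxBarsUpperBound = Math.floor(searchMaxBuckets / maxAggsPerVisualization); // 32768
```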
diff --git a/src/plugins/discover/public/application/angular/discover.js b/src/plugins/discover/public/application/angular/discover.js
index 2c80fc111c740..45382af098644 100644
--- a/src/plugins/discover/public/application/angular/discover.js
+++ b/src/plugins/discover/public/application/angular/discover.js
@@ -8,7 +8,7 @@
import _ from 'lodash';
import { merge, Subject, Subscription } from 'rxjs';
-import { debounceTime } from 'rxjs/operators';
+import { debounceTime, tap, filter } from 'rxjs/operators';
import { i18n } from '@kbn/i18n';
import { createSearchSessionRestorationDataProvider, getState, splitState } from './discover_state';
import { RequestAdapter } from '../../../../inspector/public';
@@ -25,8 +25,6 @@ import { discoverResponseHandler } from './response_handler';
import {
getAngularModule,
getHeaderActionMenuMounter,
- getRequestInspectorStats,
- getResponseInspectorStats,
getServices,
getUrlTracker,
redirectWhenMissing,
@@ -153,7 +151,6 @@ function discoverController($route, $scope) {
const subscriptions = new Subscription();
const refetch$ = new Subject();
- let inspectorRequest;
let isChangingIndexPattern = false;
const savedSearch = $route.current.locals.savedObjects.savedSearch;
const persistentSearchSource = savedSearch.searchSource;
@@ -393,12 +390,11 @@ function discoverController($route, $scope) {
$scope.state.index = $scope.indexPattern.id;
$scope.state.sort = getSortArray($scope.state.sort, $scope.indexPattern);
- $scope.opts.fetch = $scope.fetch = function () {
+ $scope.opts.fetch = $scope.fetch = async function () {
$scope.fetchCounter++;
$scope.fetchError = undefined;
if (!validateTimeRange(timefilter.getTime(), toastNotifications)) {
$scope.resultState = 'none';
- return;
}
// Abort any in-progress requests before fetching again
@@ -418,12 +414,14 @@ function discoverController($route, $scope) {
$scope.fetchStatus = fetchStatuses.LOADING;
$scope.resultState = getResultState($scope.fetchStatus, $scope.rows);
- logInspectorRequest({ searchSessionId });
+
return $scope.volatileSearchSource
- .fetch({
+ .fetch$({
abortSignal: abortController.signal,
sessionId: searchSessionId,
+ requestResponder: getRequestResponder({ searchSessionId }),
})
+ .toPromise()
.then(onResults)
.catch((error) => {
// If the request was aborted then no need to surface this error in the UI
@@ -440,10 +438,6 @@ function discoverController($route, $scope) {
};
function onResults(resp) {
- inspectorRequest
- .stats(getResponseInspectorStats(resp, $scope.volatileSearchSource))
- .ok({ json: resp });
-
if (getTimeField() && !$scope.state.hideChart) {
const tabifiedData = tabifyAggResponse($scope.opts.chartAggConfigs, resp);
$scope.volatileSearchSource.rawResponse = resp;
@@ -464,7 +458,7 @@ function discoverController($route, $scope) {
$scope.fetchStatus = fetchStatuses.COMPLETE;
}
- function logInspectorRequest({ searchSessionId = null } = { searchSessionId: null }) {
+ function getRequestResponder({ searchSessionId = null } = { searchSessionId: null }) {
inspectorAdapters.requests.reset();
const title = i18n.translate('discover.inspectorRequestDataTitle', {
defaultMessage: 'data',
@@ -472,11 +466,7 @@ function discoverController($route, $scope) {
const description = i18n.translate('discover.inspectorRequestDescription', {
defaultMessage: 'This request queries Elasticsearch to fetch the data for the search.',
});
- inspectorRequest = inspectorAdapters.requests.start(title, { description, searchSessionId });
- inspectorRequest.stats(getRequestInspectorStats($scope.volatileSearchSource));
- $scope.volatileSearchSource.getSearchRequestBody().then((body) => {
- inspectorRequest.json(body);
- });
+ return inspectorAdapters.requests.start(title, { description, searchSessionId });
}
$scope.resetQuery = function () {
@@ -494,11 +484,19 @@ function discoverController($route, $scope) {
showUnmappedFields,
};
+  // handler emitted by `timefilter.getAutoRefreshFetch$()`
+  // to notify when data has finished loading and a new autorefresh loop can start
+ let autoRefreshDoneCb;
const fetch$ = merge(
refetch$,
filterManager.getFetches$(),
timefilter.getFetch$(),
- timefilter.getAutoRefreshFetch$(),
+ timefilter.getAutoRefreshFetch$().pipe(
+ tap((done) => {
+ autoRefreshDoneCb = done;
+ }),
+ filter(() => $scope.fetchStatus !== fetchStatuses.LOADING)
+ ),
data.query.queryString.getUpdates$(),
searchSessionManager.newSearchSessionIdFromURL$
).pipe(debounceTime(100));
@@ -508,7 +506,16 @@ function discoverController($route, $scope) {
$scope,
fetch$,
{
- next: $scope.fetch,
+ next: async () => {
+ try {
+ await $scope.fetch();
+ } finally {
+            // if there is a saved `autoRefreshDoneCb`, notify the auto refresh service that
+            // the last fetch has completed so it can start the next auto refresh loop if needed
+ autoRefreshDoneCb?.();
+ autoRefreshDoneCb = undefined;
+ }
+ },
},
(error) => addFatalError(core.fatalErrors, error)
)
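The comments above describe the new contract of `timefilter.getAutoRefreshFetch$()`: each emission now carries a `done` callback that must be invoked once the fetch settles, so the next auto-refresh cycle can be scheduled. A condensed sketch of the consumer side (the `timefilter` shape and `fetchData` are assumptions for illustration; `visualize_top_nav.tsx` further below follows the same pattern):

```ts
import { Observable } from 'rxjs';

// Assumed shape for the example: the auto-refresh stream emits a completion callback.
declare const timefilter: { getAutoRefreshFetch$: () => Observable<() => void> };
declare function fetchData(): Promise<void>;

timefilter.getAutoRefreshFetch$().subscribe(async (done) => {
  try {
    await fetchData();
  } finally {
    done(); // always signal completion so the next refresh loop can start
  }
});
```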
diff --git a/src/plugins/discover/public/application/embeddable/search_embeddable.ts b/src/plugins/discover/public/application/embeddable/search_embeddable.ts
index e7349ed22355a..237da72ae3a52 100644
--- a/src/plugins/discover/public/application/embeddable/search_embeddable.ts
+++ b/src/plugins/discover/public/application/embeddable/search_embeddable.ts
@@ -29,13 +29,7 @@ import searchTemplateGrid from './search_template_datagrid.html';
import { ISearchEmbeddable, SearchInput, SearchOutput } from './types';
import { SortOrder } from '../angular/doc_table/components/table_header/helpers';
import { getSortForSearchSource } from '../angular/doc_table';
-import {
- getRequestInspectorStats,
- getResponseInspectorStats,
- getServices,
- IndexPattern,
- ISearchSource,
-} from '../../kibana_services';
+import { getServices, IndexPattern, ISearchSource } from '../../kibana_services';
import { SEARCH_EMBEDDABLE_TYPE } from './constants';
import { SavedSearch } from '../..';
import {
@@ -330,14 +324,11 @@ export class SearchEmbeddable
defaultMessage: 'This request queries Elasticsearch to fetch the data for the search.',
});
- const inspectorRequest = this.inspectorAdapters.requests!.start(title, {
+ const requestResponder = this.inspectorAdapters.requests!.start(title, {
description,
searchSessionId,
});
- inspectorRequest.stats(getRequestInspectorStats(searchSource));
- searchSource.getSearchRequestBody().then((body: Record) => {
- inspectorRequest.json(body);
- });
+
this.searchScope.$apply(() => {
this.searchScope!.isLoading = true;
});
@@ -345,15 +336,15 @@ export class SearchEmbeddable
try {
// Make the request
- const resp = await searchSource.fetch({
- abortSignal: this.abortController.signal,
- sessionId: searchSessionId,
- });
+ const resp = await searchSource
+ .fetch$({
+ abortSignal: this.abortController.signal,
+ sessionId: searchSessionId,
+ requestResponder,
+ })
+ .toPromise();
this.updateOutput({ loading: false, error: undefined });
- // Log response to inspector
- inspectorRequest.stats(getResponseInspectorStats(resp, searchSource)).ok({ json: resp });
-
// Apply the changes to the angular scope
this.searchScope.$apply(() => {
this.searchScope!.hits = resp.hits.hits;
diff --git a/src/plugins/discover/public/kibana_services.ts b/src/plugins/discover/public/kibana_services.ts
index 27bcc00234939..e4b0035ed0e03 100644
--- a/src/plugins/discover/public/kibana_services.ts
+++ b/src/plugins/discover/public/kibana_services.ts
@@ -88,7 +88,7 @@ export const [getScopedHistory, setScopedHistory] = createGetterSetter> {
+ public getUpdated$(): Readonly> {
return merge(this.getInput$().pipe(skip(1)), this.getOutput$().pipe(skip(1))).pipe(
- debounceTime(0),
- mapTo(undefined)
+ debounceTime(0)
);
}
diff --git a/src/plugins/embeddable/public/public.api.md b/src/plugins/embeddable/public/public.api.md
index b9719542adc81..3f0907acabdfa 100644
--- a/src/plugins/embeddable/public/public.api.md
+++ b/src/plugins/embeddable/public/public.api.md
@@ -282,7 +282,7 @@ export abstract class Embeddable>;
+ getUpdated$(): Readonly>;
// (undocumented)
readonly id: string;
// (undocumented)
diff --git a/src/plugins/expressions/public/loader.ts b/src/plugins/expressions/public/loader.ts
index 65925b5a2e4c2..4165b8906a20e 100644
--- a/src/plugins/expressions/public/loader.ts
+++ b/src/plugins/expressions/public/loader.ts
@@ -118,12 +118,15 @@ export class ExpressionLoader {
return this.execution ? (this.execution.inspect() as Adapters) : undefined;
}
- update(expression?: string | ExpressionAstExpression, params?: IExpressionLoaderParams): void {
+ async update(
+ expression?: string | ExpressionAstExpression,
+ params?: IExpressionLoaderParams
+  ): Promise<void> {
this.setParams(params);
this.loadingSubject.next(true);
if (expression) {
- this.loadData(expression, this.params);
+ await this.loadData(expression, this.params);
} else if (this.data) {
this.render(this.data);
}
diff --git a/src/plugins/kibana_usage_collection/tsconfig.json b/src/plugins/kibana_usage_collection/tsconfig.json
index d664d936f6667..ee07dfe589e4a 100644
--- a/src/plugins/kibana_usage_collection/tsconfig.json
+++ b/src/plugins/kibana_usage_collection/tsconfig.json
@@ -5,7 +5,8 @@
"outDir": "./target/types",
"emitDeclarationOnly": true,
"declaration": true,
- "declarationMap": true
+ "declarationMap": true,
+ "isolatedModules": true
},
"include": [
"common/*",
diff --git a/src/plugins/kibana_utils/common/errors/errors.ts b/src/plugins/kibana_utils/common/errors/errors.ts
index 7a9495cc8f413..7f3efc6d9571f 100644
--- a/src/plugins/kibana_utils/common/errors/errors.ts
+++ b/src/plugins/kibana_utils/common/errors/errors.ts
@@ -32,7 +32,7 @@ export class DuplicateField extends KbnError {
export class SavedObjectNotFound extends KbnError {
public savedObjectType: string;
public savedObjectId?: string;
- constructor(type: string, id?: string, link?: string) {
+ constructor(type: string, id?: string, link?: string, customMessage?: string) {
const idMsg = id ? ` (id: ${id})` : '';
let message = `Could not locate that ${type}${idMsg}`;
@@ -40,13 +40,31 @@ export class SavedObjectNotFound extends KbnError {
message += `, [click here to re-create it](${link})`;
}
- super(message);
+ super(customMessage || message);
this.savedObjectType = type;
this.savedObjectId = id;
}
}
+/**
+ * A saved field doesn't exist anymore
+ */
+export class SavedFieldNotFound extends KbnError {
+ constructor(message: string) {
+ super(message);
+ }
+}
+
+/**
+ * A saved field type isn't compatible with aggregation
+ */
+export class SavedFieldTypeInvalidForAgg extends KbnError {
+ constructor(message: string) {
+ super(message);
+ }
+}
+
/**
* This error is for scenarios where a saved object is detected that has invalid JSON properties.
* There was a scenario where we were importing objects with double-encoded JSON, and the system
diff --git a/src/plugins/telemetry/common/telemetry_config/index.ts b/src/plugins/telemetry/common/telemetry_config/index.ts
index 84b6486f35b24..cc4ff102742d7 100644
--- a/src/plugins/telemetry/common/telemetry_config/index.ts
+++ b/src/plugins/telemetry/common/telemetry_config/index.ts
@@ -9,7 +9,5 @@
export { getTelemetryOptIn } from './get_telemetry_opt_in';
export { getTelemetrySendUsageFrom } from './get_telemetry_send_usage_from';
export { getTelemetryAllowChangingOptInStatus } from './get_telemetry_allow_changing_opt_in_status';
-export {
- getTelemetryFailureDetails,
- TelemetryFailureDetails,
-} from './get_telemetry_failure_details';
+export { getTelemetryFailureDetails } from './get_telemetry_failure_details';
+export type { TelemetryFailureDetails } from './get_telemetry_failure_details';
diff --git a/src/plugins/telemetry/public/index.ts b/src/plugins/telemetry/public/index.ts
index 6cca9bdf881dd..47ba7828eaec2 100644
--- a/src/plugins/telemetry/public/index.ts
+++ b/src/plugins/telemetry/public/index.ts
@@ -8,7 +8,7 @@
import { PluginInitializerContext } from 'kibana/public';
import { TelemetryPlugin, TelemetryPluginConfig } from './plugin';
-export { TelemetryPluginStart, TelemetryPluginSetup } from './plugin';
+export type { TelemetryPluginStart, TelemetryPluginSetup } from './plugin';
export function plugin(initializerContext: PluginInitializerContext) {
return new TelemetryPlugin(initializerContext);
diff --git a/src/plugins/telemetry/server/index.ts b/src/plugins/telemetry/server/index.ts
index debdf7515cd58..1c335426ffd03 100644
--- a/src/plugins/telemetry/server/index.ts
+++ b/src/plugins/telemetry/server/index.ts
@@ -13,7 +13,7 @@ import { configSchema, TelemetryConfigType } from './config';
export { FetcherTask } from './fetcher';
export { handleOldSettings } from './handle_old_settings';
-export { TelemetryPluginSetup, TelemetryPluginStart } from './plugin';
+export type { TelemetryPluginSetup, TelemetryPluginStart } from './plugin';
export const config: PluginConfigDescriptor = {
schema: configSchema,
@@ -34,9 +34,12 @@ export { constants };
export {
getClusterUuids,
getLocalStats,
- TelemetryLocalStats,
DATA_TELEMETRY_ID,
+ buildDataTelemetryPayload,
+} from './telemetry_collection';
+
+export type {
+ TelemetryLocalStats,
DataTelemetryIndex,
DataTelemetryPayload,
- buildDataTelemetryPayload,
} from './telemetry_collection';
diff --git a/src/plugins/telemetry/server/telemetry_collection/get_data_telemetry/index.ts b/src/plugins/telemetry/server/telemetry_collection/get_data_telemetry/index.ts
index def1131dfb1a3..c93b7e872924b 100644
--- a/src/plugins/telemetry/server/telemetry_collection/get_data_telemetry/index.ts
+++ b/src/plugins/telemetry/server/telemetry_collection/get_data_telemetry/index.ts
@@ -7,10 +7,5 @@
*/
export { DATA_TELEMETRY_ID } from './constants';
-
-export {
- getDataTelemetry,
- buildDataTelemetryPayload,
- DataTelemetryPayload,
- DataTelemetryIndex,
-} from './get_data_telemetry';
+export { getDataTelemetry, buildDataTelemetryPayload } from './get_data_telemetry';
+export type { DataTelemetryPayload, DataTelemetryIndex } from './get_data_telemetry';
diff --git a/src/plugins/telemetry/server/telemetry_collection/index.ts b/src/plugins/telemetry/server/telemetry_collection/index.ts
index 55f9c7f0e624c..151e89a11a192 100644
--- a/src/plugins/telemetry/server/telemetry_collection/index.ts
+++ b/src/plugins/telemetry/server/telemetry_collection/index.ts
@@ -6,12 +6,9 @@
* Side Public License, v 1.
*/
-export {
- DATA_TELEMETRY_ID,
- DataTelemetryIndex,
- DataTelemetryPayload,
- buildDataTelemetryPayload,
-} from './get_data_telemetry';
-export { getLocalStats, TelemetryLocalStats } from './get_local_stats';
+export { DATA_TELEMETRY_ID, buildDataTelemetryPayload } from './get_data_telemetry';
+export type { DataTelemetryIndex, DataTelemetryPayload } from './get_data_telemetry';
+export { getLocalStats } from './get_local_stats';
+export type { TelemetryLocalStats } from './get_local_stats';
export { getClusterUuids } from './get_cluster_stats';
export { registerCollection } from './register_collection';
diff --git a/src/plugins/telemetry/server/telemetry_repository/index.ts b/src/plugins/telemetry/server/telemetry_repository/index.ts
index 4e3f046f7611f..594b53259a65f 100644
--- a/src/plugins/telemetry/server/telemetry_repository/index.ts
+++ b/src/plugins/telemetry/server/telemetry_repository/index.ts
@@ -8,7 +8,7 @@
export { getTelemetrySavedObject } from './get_telemetry_saved_object';
export { updateTelemetrySavedObject } from './update_telemetry_saved_object';
-export {
+export type {
TelemetrySavedObject,
TelemetrySavedObjectAttributes,
} from '../../common/telemetry_config/types';
diff --git a/src/plugins/telemetry/tsconfig.json b/src/plugins/telemetry/tsconfig.json
index bdced01d9eb6f..6629e479906c9 100644
--- a/src/plugins/telemetry/tsconfig.json
+++ b/src/plugins/telemetry/tsconfig.json
@@ -5,7 +5,8 @@
"outDir": "./target/types",
"emitDeclarationOnly": true,
"declaration": true,
- "declarationMap": true
+ "declarationMap": true,
+ "isolatedModules": true
},
"include": [
"public/**/**/*",
diff --git a/src/plugins/telemetry_collection_manager/server/index.ts b/src/plugins/telemetry_collection_manager/server/index.ts
index 77077b73cf8ad..c0cd124a132c0 100644
--- a/src/plugins/telemetry_collection_manager/server/index.ts
+++ b/src/plugins/telemetry_collection_manager/server/index.ts
@@ -16,7 +16,7 @@ export function plugin(initializerContext: PluginInitializerContext) {
return new TelemetryCollectionManagerPlugin(initializerContext);
}
-export {
+export type {
TelemetryCollectionManagerPluginSetup,
TelemetryCollectionManagerPluginStart,
StatsCollectionConfig,
diff --git a/src/plugins/telemetry_collection_manager/tsconfig.json b/src/plugins/telemetry_collection_manager/tsconfig.json
index 1bba81769f0dd..1329979860603 100644
--- a/src/plugins/telemetry_collection_manager/tsconfig.json
+++ b/src/plugins/telemetry_collection_manager/tsconfig.json
@@ -5,7 +5,8 @@
"outDir": "./target/types",
"emitDeclarationOnly": true,
"declaration": true,
- "declarationMap": true
+ "declarationMap": true,
+ "isolatedModules": true
},
"include": [
"server/**/*",
diff --git a/src/plugins/telemetry_management_section/public/index.ts b/src/plugins/telemetry_management_section/public/index.ts
index 28b04418f512d..db6ea17556ed3 100644
--- a/src/plugins/telemetry_management_section/public/index.ts
+++ b/src/plugins/telemetry_management_section/public/index.ts
@@ -10,7 +10,7 @@ import { TelemetryManagementSectionPlugin } from './plugin';
export { OptInExampleFlyout } from './components';
-export { TelemetryManagementSectionPluginSetup } from './plugin';
+export type { TelemetryManagementSectionPluginSetup } from './plugin';
export function plugin() {
return new TelemetryManagementSectionPlugin();
}
diff --git a/src/plugins/telemetry_management_section/tsconfig.json b/src/plugins/telemetry_management_section/tsconfig.json
index 48e40814b8570..2daee868ac200 100644
--- a/src/plugins/telemetry_management_section/tsconfig.json
+++ b/src/plugins/telemetry_management_section/tsconfig.json
@@ -5,7 +5,8 @@
"outDir": "./target/types",
"emitDeclarationOnly": true,
"declaration": true,
- "declarationMap": true
+ "declarationMap": true,
+ "isolatedModules": true
},
"include": [
"public/**/*",
diff --git a/src/plugins/usage_collection/public/index.ts b/src/plugins/usage_collection/public/index.ts
index b9e0e0a8985b1..9b009b1d9e264 100644
--- a/src/plugins/usage_collection/public/index.ts
+++ b/src/plugins/usage_collection/public/index.ts
@@ -10,7 +10,7 @@ import { PluginInitializerContext } from '../../../core/public';
import { UsageCollectionPlugin } from './plugin';
export { METRIC_TYPE } from '@kbn/analytics';
-export { UsageCollectionSetup, UsageCollectionStart } from './plugin';
+export type { UsageCollectionSetup, UsageCollectionStart } from './plugin';
export { TrackApplicationView } from './components';
export function plugin(initializerContext: PluginInitializerContext) {
diff --git a/src/plugins/usage_collection/server/collector/index.ts b/src/plugins/usage_collection/server/collector/index.ts
index 5f48f9fb93813..d5e0d95659e58 100644
--- a/src/plugins/usage_collection/server/collector/index.ts
+++ b/src/plugins/usage_collection/server/collector/index.ts
@@ -6,9 +6,10 @@
* Side Public License, v 1.
*/
-export { CollectorSet, CollectorSetPublic } from './collector_set';
-export {
- Collector,
+export { CollectorSet } from './collector_set';
+export type { CollectorSetPublic } from './collector_set';
+export { Collector } from './collector';
+export type {
AllowedSchemaTypes,
AllowedSchemaNumberTypes,
SchemaField,
@@ -16,4 +17,5 @@ export {
CollectorOptions,
CollectorFetchContext,
} from './collector';
-export { UsageCollector, UsageCollectorOptions } from './usage_collector';
+export { UsageCollector } from './usage_collector';
+export type { UsageCollectorOptions } from './usage_collector';
diff --git a/src/plugins/usage_collection/server/index.ts b/src/plugins/usage_collection/server/index.ts
index dfc9d19b69646..dd9e6644a827d 100644
--- a/src/plugins/usage_collection/server/index.ts
+++ b/src/plugins/usage_collection/server/index.ts
@@ -9,17 +9,16 @@
import { PluginInitializerContext } from 'src/core/server';
import { UsageCollectionPlugin } from './plugin';
-export {
+export { Collector } from './collector';
+export type {
AllowedSchemaTypes,
MakeSchemaFrom,
SchemaField,
CollectorOptions,
UsageCollectorOptions,
- Collector,
CollectorFetchContext,
} from './collector';
-
-export { UsageCollectionSetup } from './plugin';
+export type { UsageCollectionSetup } from './plugin';
export { config } from './config';
export const plugin = (initializerContext: PluginInitializerContext) =>
new UsageCollectionPlugin(initializerContext);
diff --git a/src/plugins/usage_collection/server/usage_collection.mock.ts b/src/plugins/usage_collection/server/usage_collection.mock.ts
index 1a60d84e7948c..7e3f4273bbea8 100644
--- a/src/plugins/usage_collection/server/usage_collection.mock.ts
+++ b/src/plugins/usage_collection/server/usage_collection.mock.ts
@@ -16,7 +16,8 @@ import {
import { CollectorOptions, Collector, UsageCollector } from './collector';
import { UsageCollectionSetup, CollectorFetchContext } from './index';
-export { CollectorOptions, Collector };
+export type { CollectorOptions };
+export { Collector };
const logger = loggingSystemMock.createLogger();
diff --git a/src/plugins/usage_collection/tsconfig.json b/src/plugins/usage_collection/tsconfig.json
index 96b2c4d37e17c..68a0853994e80 100644
--- a/src/plugins/usage_collection/tsconfig.json
+++ b/src/plugins/usage_collection/tsconfig.json
@@ -5,7 +5,8 @@
"outDir": "./target/types",
"emitDeclarationOnly": true,
"declaration": true,
- "declarationMap": true
+ "declarationMap": true,
+ "isolatedModules": true
},
"include": [
"public/**/*",
diff --git a/src/plugins/vis_default_editor/public/_default.scss b/src/plugins/vis_default_editor/public/_default.scss
index c412b9d915e55..56c6a0f0f63f6 100644
--- a/src/plugins/vis_default_editor/public/_default.scss
+++ b/src/plugins/vis_default_editor/public/_default.scss
@@ -1,6 +1,4 @@
.visEditor--default {
- // height: 1px is in place to make editor children take their height in the parent
- height: 1px;
flex: 1 1 auto;
display: flex;
}
@@ -80,6 +78,7 @@
.visEditor__collapsibleSidebar {
width: 100% !important; // force the editor to take 100% width
+ flex-grow: 0;
}
.visEditor__collapsibleSidebar-isClosed {
@@ -91,8 +90,10 @@
}
.visEditor__visualization__wrapper {
- // force the visualization to take 100% width and height.
+ // force the visualization to take 100% width.
width: 100% !important;
- height: 100% !important;
+ flex: 1;
+ display: flex;
+ flex-direction: column;
}
}
diff --git a/src/plugins/vis_default_editor/public/components/controls/field.test.tsx b/src/plugins/vis_default_editor/public/components/controls/field.test.tsx
index 94f767510c4bd..277804567c2b7 100644
--- a/src/plugins/vis_default_editor/public/components/controls/field.test.tsx
+++ b/src/plugins/vis_default_editor/public/components/controls/field.test.tsx
@@ -11,7 +11,7 @@ import { act } from 'react-dom/test-utils';
import { mount, shallow, ReactWrapper } from 'enzyme';
import { EuiComboBoxProps, EuiComboBox } from '@elastic/eui';
-import { IAggConfig, IndexPatternField } from 'src/plugins/data/public';
+import { IAggConfig, IndexPatternField, AggParam } from 'src/plugins/data/public';
import { ComboBoxGroupedOptions } from '../../utils';
import { FieldParamEditor, FieldParamEditorProps } from './field';
import { EditorVisState } from '../sidebar/state/reducers';
@@ -42,7 +42,7 @@ describe('FieldParamEditor component', () => {
setTouched = jest.fn();
onChange = jest.fn();
- field = { displayName: 'bytes' } as IndexPatternField;
+ field = { displayName: 'bytes', type: 'bytes' } as IndexPatternField;
option = { label: 'bytes', target: field };
indexedFields = [
{
@@ -52,7 +52,16 @@ describe('FieldParamEditor component', () => {
];
defaultProps = {
- agg: {} as IAggConfig,
+ agg: {
+ type: {
+ params: [
+ ({
+ name: 'field',
+ filterFieldTypes: ['bytes'],
+ } as unknown) as AggParam,
+ ],
+ },
+ } as IAggConfig,
aggParam: {
name: 'field',
type: 'field',
diff --git a/src/plugins/vis_default_editor/public/components/controls/field.tsx b/src/plugins/vis_default_editor/public/components/controls/field.tsx
index 95843dc6ae3a8..f8db2d89888a2 100644
--- a/src/plugins/vis_default_editor/public/components/controls/field.tsx
+++ b/src/plugins/vis_default_editor/public/components/controls/field.tsx
@@ -13,7 +13,13 @@ import useMount from 'react-use/lib/useMount';
import { EuiComboBox, EuiComboBoxOptionOption, EuiFormRow } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
-import { AggParam, IAggConfig, IFieldParamType, IndexPatternField } from 'src/plugins/data/public';
+import {
+ AggParam,
+ IAggConfig,
+ IFieldParamType,
+ IndexPatternField,
+ KBN_FIELD_TYPES,
+} from '../../../../../plugins/data/public';
import { formatListAsProse, parseCommaSeparatedList, useValidation } from './utils';
import { AggParamEditorProps } from '../agg_param_props';
import { ComboBoxGroupedOptions } from '../../utils';
@@ -55,6 +61,7 @@ function FieldParamEditor({
}
};
const errors = customError ? [customError] : [];
+ let showErrorMessageImmediately = false;
if (!indexedFields.length) {
errors.push(
@@ -69,9 +76,38 @@ function FieldParamEditor({
);
}
+ if (value && value.type === KBN_FIELD_TYPES.MISSING) {
+ errors.push(
+ i18n.translate('visDefaultEditor.controls.field.fieldIsNotExists', {
+ defaultMessage:
+ 'The field "{fieldParameter}" associated with this object no longer exists in the index pattern. Please use another field.',
+ values: {
+ fieldParameter: value.name,
+ },
+ })
+ );
+ showErrorMessageImmediately = true;
+ } else if (
+ value &&
+ !getFieldTypes(agg).find((type: string) => type === value.type || type === '*')
+ ) {
+ errors.push(
+ i18n.translate('visDefaultEditor.controls.field.invalidFieldForAggregation', {
+ defaultMessage:
+ 'Saved field "{fieldParameter}" of index pattern "{indexPatternTitle}" is invalid for use with this aggregation. Please select a new field.',
+ values: {
+ fieldParameter: value?.name,
+ indexPatternTitle: agg.getIndexPattern && agg.getIndexPattern().title,
+ },
+ })
+ );
+ showErrorMessageImmediately = true;
+ }
+
const isValid = !!value && !errors.length && !isDirty;
   // we show an error message right away if there are no compatible fields
- const showErrorMessage = (showValidation || !indexedFields.length) && !isValid;
+ const showErrorMessage =
+ (showValidation || !indexedFields.length || showErrorMessageImmediately) && !isValid;
useValidation(setValidity, isValid);
useMount(() => {
@@ -122,10 +158,14 @@ function FieldParamEditor({
}
function getFieldTypesString(agg: IAggConfig) {
+ return formatListAsProse(getFieldTypes(agg), { inclusive: false });
+}
+
+function getFieldTypes(agg: IAggConfig) {
const param =
get(agg, 'type.params', []).find((p: AggParam) => p.name === 'field') ||
({} as IFieldParamType);
- return formatListAsProse(parseCommaSeparatedList(param.filterFieldTypes), { inclusive: false });
+ return parseCommaSeparatedList(param.filterFieldTypes || []);
}
export { FieldParamEditor };
diff --git a/src/plugins/vis_type_timeseries/server/ui_settings.ts b/src/plugins/vis_type_timeseries/server/ui_settings.ts
index 9b7f6a1c832ef..07d2355b22253 100644
--- a/src/plugins/vis_type_timeseries/server/ui_settings.ts
+++ b/src/plugins/vis_type_timeseries/server/ui_settings.ts
@@ -16,11 +16,12 @@ import { MAX_BUCKETS_SETTING } from '../common/constants';
export const uiSettings: Record = {
[MAX_BUCKETS_SETTING]: {
name: i18n.translate('visTypeTimeseries.advancedSettings.maxBucketsTitle', {
- defaultMessage: 'Maximum buckets',
+ defaultMessage: 'TSVB buckets limit',
}),
value: 2000,
description: i18n.translate('visTypeTimeseries.advancedSettings.maxBucketsText', {
- defaultMessage: 'The maximum number of buckets a single datasource can return',
+ defaultMessage:
+ 'Affects the TSVB histogram density. Must be set higher than "histogram:maxBars".',
}),
schema: schema.number(),
},
diff --git a/src/plugins/visualizations/public/embeddable/visualize_embeddable.ts b/src/plugins/visualizations/public/embeddable/visualize_embeddable.ts
index 429dabeeef042..3bb52eb15758a 100644
--- a/src/plugins/visualizations/public/embeddable/visualize_embeddable.ts
+++ b/src/plugins/visualizations/public/embeddable/visualize_embeddable.ts
@@ -149,8 +149,9 @@ export class VisualizeEmbeddable
}
this.subscriptions.push(
- this.getUpdated$().subscribe(() => {
+ this.getUpdated$().subscribe((value) => {
const isDirty = this.handleChanges();
+
if (isDirty && this.handler) {
this.updateHandler();
}
@@ -367,8 +368,8 @@ export class VisualizeEmbeddable
}
}
- public reload = () => {
- this.handleVisUpdate();
+ public reload = async () => {
+ await this.handleVisUpdate();
};
private async updateHandler() {
@@ -395,13 +396,13 @@ export class VisualizeEmbeddable
});
if (this.handler && !abortController.signal.aborted) {
- this.handler.update(this.expression, expressionParams);
+ await this.handler.update(this.expression, expressionParams);
}
}
private handleVisUpdate = async () => {
this.handleChanges();
- this.updateHandler();
+ await this.updateHandler();
};
private uiStateChangeHandler = () => {
diff --git a/src/plugins/visualize/public/application/components/visualize_top_nav.tsx b/src/plugins/visualize/public/application/components/visualize_top_nav.tsx
index 256e634ac6c40..f6ef1caf9c9e0 100644
--- a/src/plugins/visualize/public/application/components/visualize_top_nav.tsx
+++ b/src/plugins/visualize/public/application/components/visualize_top_nav.tsx
@@ -183,8 +183,12 @@ const TopNav = ({
useEffect(() => {
const autoRefreshFetchSub = services.data.query.timefilter.timefilter
.getAutoRefreshFetch$()
- .subscribe(() => {
- visInstance.embeddableHandler.reload();
+ .subscribe(async (done) => {
+ try {
+ await visInstance.embeddableHandler.reload();
+ } finally {
+ done();
+ }
});
return () => {
autoRefreshFetchSub.unsubscribe();
diff --git a/src/plugins/visualize/public/application/utils/get_visualization_instance.ts b/src/plugins/visualize/public/application/utils/get_visualization_instance.ts
index cc0f3ce2afae5..9eda709e58c3e 100644
--- a/src/plugins/visualize/public/application/utils/get_visualization_instance.ts
+++ b/src/plugins/visualize/public/application/utils/get_visualization_instance.ts
@@ -18,8 +18,17 @@ import { SavedObject } from 'src/plugins/saved_objects/public';
import { cloneDeep } from 'lodash';
import { ExpressionValueError } from 'src/plugins/expressions/public';
import { createSavedSearchesLoader } from '../../../../discover/public';
+import { SavedFieldNotFound, SavedFieldTypeInvalidForAgg } from '../../../../kibana_utils/common';
import { VisualizeServices } from '../types';
+function isErrorRelatedToRuntimeFields(error: ExpressionValueError['error']) {
+ const originalError = error.original || error;
+ return (
+ originalError instanceof SavedFieldNotFound ||
+ originalError instanceof SavedFieldTypeInvalidForAgg
+ );
+}
+
const createVisualizeEmbeddableAndLinkSavedSearch = async (
vis: Vis,
visualizeServices: VisualizeServices
@@ -37,7 +46,7 @@ const createVisualizeEmbeddableAndLinkSavedSearch = async (
})) as VisualizeEmbeddableContract;
embeddableHandler.getOutput$().subscribe((output) => {
- if (output.error) {
+ if (output.error && !isErrorRelatedToRuntimeFields(output.error)) {
data.search.showError(
((output.error as unknown) as ExpressionValueError['error']).original || output.error
);
diff --git a/src/plugins/visualize/public/application/utils/use/use_saved_vis_instance.ts b/src/plugins/visualize/public/application/utils/use/use_saved_vis_instance.ts
index 64d61996495d7..965951bfbd88d 100644
--- a/src/plugins/visualize/public/application/utils/use/use_saved_vis_instance.ts
+++ b/src/plugins/visualize/public/application/utils/use/use_saved_vis_instance.ts
@@ -11,13 +11,12 @@ import { EventEmitter } from 'events';
import { parse } from 'query-string';
import { i18n } from '@kbn/i18n';
-import { redirectWhenMissing } from '../../../../../kibana_utils/public';
-
import { getVisualizationInstance } from '../get_visualization_instance';
import { getEditBreadcrumbs, getCreateBreadcrumbs } from '../breadcrumbs';
import { SavedVisInstance, VisualizeServices, IEditorController } from '../../types';
import { VisualizeConstants } from '../../visualize_constants';
import { getVisEditorsRegistry } from '../../../services';
+import { redirectToSavedObjectPage } from '../utils';
/**
* This effect is responsible for instantiating a saved vis or creating a new one
@@ -43,9 +42,7 @@ export const useSavedVisInstance = (
chrome,
history,
dashboard,
- setActiveUrl,
toastNotifications,
- http: { basePath },
stateTransferService,
application: { navigateToApp },
} = services;
@@ -131,27 +128,8 @@ export const useSavedVisInstance = (
visEditorController,
});
} catch (error) {
- const managementRedirectTarget = {
- app: 'management',
- path: `kibana/objects/savedVisualizations/${visualizationIdFromUrl}`,
- };
-
try {
- redirectWhenMissing({
- history,
- navigateToApp,
- toastNotifications,
- basePath,
- mapping: {
- visualization: VisualizeConstants.LANDING_PAGE_PATH,
- search: managementRedirectTarget,
- 'index-pattern': managementRedirectTarget,
- 'index-pattern-field': managementRedirectTarget,
- },
- onBeforeRedirect() {
- setActiveUrl(VisualizeConstants.LANDING_PAGE_PATH);
- },
- })(error);
+ redirectToSavedObjectPage(services, error, visualizationIdFromUrl);
} catch (e) {
toastNotifications.addWarning({
title: i18n.translate('visualize.createVisualization.failedToLoadErrorMessage', {
diff --git a/src/plugins/visualize/public/application/utils/utils.ts b/src/plugins/visualize/public/application/utils/utils.ts
index 0e529507f97e3..c906ff5304c90 100644
--- a/src/plugins/visualize/public/application/utils/utils.ts
+++ b/src/plugins/visualize/public/application/utils/utils.ts
@@ -10,6 +10,8 @@ import { i18n } from '@kbn/i18n';
import { ChromeStart, DocLinksStart } from 'kibana/public';
import { Filter } from '../../../../data/public';
+import { redirectWhenMissing } from '../../../../kibana_utils/public';
+import { VisualizeConstants } from '../visualize_constants';
import { VisualizeServices, VisualizeEditorVisInstance } from '../types';
export const addHelpMenuToAppChrome = (chrome: ChromeStart, docLinks: DocLinksStart) => {
@@ -58,3 +60,36 @@ export const visStateToEditorState = (
linked: savedVis && savedVis.id ? !!savedVis.savedSearchId : !!savedVisState.savedSearchId,
};
};
+
+export const redirectToSavedObjectPage = (
+ services: VisualizeServices,
+ error: any,
+ savedVisualizationsId?: string
+) => {
+ const {
+ history,
+ setActiveUrl,
+ toastNotifications,
+ http: { basePath },
+ application: { navigateToApp },
+ } = services;
+ const managementRedirectTarget = {
+ app: 'management',
+ path: `kibana/objects/savedVisualizations/${savedVisualizationsId}`,
+ };
+ redirectWhenMissing({
+ history,
+ navigateToApp,
+ toastNotifications,
+ basePath,
+ mapping: {
+ visualization: VisualizeConstants.LANDING_PAGE_PATH,
+ search: managementRedirectTarget,
+ 'index-pattern': managementRedirectTarget,
+ 'index-pattern-field': managementRedirectTarget,
+ },
+ onBeforeRedirect() {
+ setActiveUrl(VisualizeConstants.LANDING_PAGE_PATH);
+ },
+ })(error);
+};
diff --git a/test/common/services/index.ts b/test/common/services/index.ts
index 7404bd1d7f46e..cc4859b7016bf 100644
--- a/test/common/services/index.ts
+++ b/test/common/services/index.ts
@@ -15,6 +15,7 @@ import { RetryProvider } from './retry';
import { RandomnessProvider } from './randomness';
import { SecurityServiceProvider } from './security';
import { EsDeleteAllIndicesProvider } from './es_delete_all_indices';
+import { SavedObjectInfoProvider } from './saved_object_info';
export const services = {
deployment: DeploymentProvider,
@@ -26,4 +27,5 @@ export const services = {
randomness: RandomnessProvider,
security: SecurityServiceProvider,
esDeleteAllIndices: EsDeleteAllIndicesProvider,
+ savedObjectInfo: SavedObjectInfoProvider,
};
diff --git a/test/common/services/saved_object_info.ts b/test/common/services/saved_object_info.ts
new file mode 100644
index 0000000000000..02ab38d4ecb1d
--- /dev/null
+++ b/test/common/services/saved_object_info.ts
@@ -0,0 +1,53 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Client } from '@elastic/elasticsearch';
+import url from 'url';
+import { Either, fromNullable, chain, getOrElse } from 'fp-ts/Either';
+import { flow } from 'fp-ts/function';
+import { FtrProviderContext } from '../ftr_provider_context';
+
+const pluck = (key: string) => (obj: any): Either =>
+ fromNullable(new Error(`Missing ${key}`))(obj[key]);
+
+const types = (node: string) => async (index: string = '.kibana') => {
+ let res: unknown;
+ try {
+ const { body } = await new Client({ node }).search({
+ index,
+ body: {
+ aggs: {
+ savedobjs: {
+ terms: {
+ field: 'type',
+ },
+ },
+ },
+ },
+ });
+
+ res = flow(
+ pluck('aggregations'),
+ chain(pluck('savedobjs')),
+ chain(pluck('buckets')),
+ getOrElse((err) => `${err.message}`)
+ )(body);
+ } catch (err) {
+ throw new Error(`Error while searching for saved object types: ${err}`);
+ }
+
+ return res;
+};
+
+export const SavedObjectInfoProvider: any = ({ getService }: FtrProviderContext) => {
+ const config = getService('config');
+
+ return {
+ types: types(url.format(config.get('servers.elasticsearch'))),
+ };
+};
diff --git a/test/functional/apps/discover/_discover.ts b/test/functional/apps/discover/_discover.ts
index cc62608fbde6d..bf90d90cc828c 100644
--- a/test/functional/apps/discover/_discover.ts
+++ b/test/functional/apps/discover/_discover.ts
@@ -11,6 +11,7 @@ import expect from '@kbn/expect';
import { FtrProviderContext } from '../../ftr_provider_context';
export default function ({ getService, getPageObjects }: FtrProviderContext) {
+ const savedObjectInfo = getService('savedObjectInfo');
const browser = getService('browser');
const log = getService('log');
const retry = getService('retry');
@@ -31,6 +32,9 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
await kibanaServer.savedObjects.clean({ types: ['search'] });
await kibanaServer.importExport.load('discover');
+ log.info(
+ `\n### SAVED OBJECT TYPES IN index: [.kibana]: \n\t${await savedObjectInfo.types()}`
+ );
// and load a set of makelogs data
await esArchiver.loadIfNeeded('logstash_functional');
diff --git a/test/functional/apps/visualize/input_control_vis/input_control_options.ts b/test/functional/apps/visualize/input_control_vis/input_control_options.ts
index dc02cada9a712..2e3b5d758436e 100644
--- a/test/functional/apps/visualize/input_control_vis/input_control_options.ts
+++ b/test/functional/apps/visualize/input_control_vis/input_control_options.ts
@@ -31,7 +31,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
);
await PageObjects.visEditor.clickVisEditorTab('controls');
await PageObjects.visEditor.addInputControl();
- await comboBox.set('indexPatternSelect-0', 'logstash- ');
+ await comboBox.set('indexPatternSelect-0', 'logstash-');
await comboBox.set('fieldSelect-0', FIELD_NAME);
await PageObjects.visEditor.clickGo();
});
diff --git a/typings/elasticsearch/search.d.ts b/typings/elasticsearch/search.d.ts
index fce08df1c0a04..c9bf3b1d8b7bc 100644
--- a/typings/elasticsearch/search.d.ts
+++ b/typings/elasticsearch/search.d.ts
@@ -370,6 +370,16 @@ export type AggregateOf<
missing: {
doc_count: number;
} & SubAggregateOf;
+ multi_terms: {
+ doc_count_error_upper_bound: number;
+ sum_other_doc_count: number;
+ buckets: Array<
+ {
+ doc_count: number;
+ key: string[];
+ } & SubAggregateOf
+ >;
+ };
nested: {
doc_count: number;
} & SubAggregateOf;
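For reference, a hedged sketch of the kind of search body the new `multi_terms` typing above is meant to describe (field names are illustrative assumptions):

```ts
// Each bucket of a multi_terms aggregation is keyed by an array of values,
// one per entry in `terms`, matching `key: string[]` in the typing above.
const body = {
  size: 0,
  aggs: {
    by_service_and_env: {
      multi_terms: {
        terms: [{ field: 'service.name' }, { field: 'service.environment' }],
      },
    },
  },
};
```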
diff --git a/vars/retryable.groovy b/vars/retryable.groovy
index ed84a00ece49d..bfd021ddd8167 100644
--- a/vars/retryable.groovy
+++ b/vars/retryable.groovy
@@ -48,7 +48,10 @@ def call(label, Closure closure) {
try {
closure()
- } catch (ex) {
+ } catch (org.jenkinsci.plugins.workflow.steps.FlowInterruptedException ex) {
+ // If the build was aborted, don't retry the step
+ throw ex
+ } catch (Exception ex) {
if (haveReachedMaxRetries()) {
print "Couldn't retry '${label}', have already reached the max number of retries for this build."
throw ex
diff --git a/x-pack/examples/alerting_example/server/plugin.ts b/x-pack/examples/alerting_example/server/plugin.ts
index db9c996147c94..f6131679874db 100644
--- a/x-pack/examples/alerting_example/server/plugin.ts
+++ b/x-pack/examples/alerting_example/server/plugin.ts
@@ -33,7 +33,7 @@ export class AlertingExamplePlugin implements Plugin {
+ it('handles empty variables', () => {
+ expect(buildAlertHistoryDocument({})).toBeNull();
+ });
+
+ it('returns null if rule type is not defined', () => {
+ expect(buildAlertHistoryDocument(getVariables({ rule: { type: undefined } }))).toBeNull();
+ });
+
+ it('returns null if alert variables are not defined', () => {
+ expect(buildAlertHistoryDocument(getVariables({ alert: undefined }))).toBeNull();
+ });
+
+ it('returns null if rule variables are not defined', () => {
+ expect(buildAlertHistoryDocument(getVariables({ rule: undefined }))).toBeNull();
+ });
+
+ it('includes @timestamp field if date is null', () => {
+ const alertHistoryDoc = buildAlertHistoryDocument(getVariables({ date: undefined }));
+ expect(alertHistoryDoc).not.toBeNull();
+ expect(alertHistoryDoc!['@timestamp']).toBeTruthy();
+ });
+
+ it(`doesn't include context if context is empty`, () => {
+ const alertHistoryDoc = buildAlertHistoryDocument(getVariables({ context: {} }));
+ expect(alertHistoryDoc).not.toBeNull();
+ expect(alertHistoryDoc!.kibana?.alert?.context).toBeFalsy();
+ });
+
+ it(`doesn't include params if params is empty`, () => {
+ const alertHistoryDoc = buildAlertHistoryDocument(getVariables({ params: {} }));
+ expect(alertHistoryDoc).not.toBeNull();
+ expect(alertHistoryDoc!.rule?.params).toBeFalsy();
+ });
+
+ it(`doesn't include tags if tags is empty array`, () => {
+ const alertHistoryDoc = buildAlertHistoryDocument(getVariables({ tags: [] }));
+ expect(alertHistoryDoc).not.toBeNull();
+ expect(alertHistoryDoc!.tags).toBeFalsy();
+ });
+
+  it(`includes message if context contains message`, () => {
+ const alertHistoryDoc = buildAlertHistoryDocument(
+ getVariables({
+ context: { contextVar1: 'contextValue1', contextVar2: 'contextValue2', message: 'hello!' },
+ })
+ );
+ expect(alertHistoryDoc).not.toBeNull();
+ expect(alertHistoryDoc!.message).toEqual('hello!');
+ });
+
+ it('builds alert history document from variables', () => {
+ expect(buildAlertHistoryDocument(getVariables())).toEqual({
+ '@timestamp': '2021-01-01T00:00:00.000Z',
+ kibana: {
+ alert: {
+ actionGroup: 'action-group-id',
+ actionGroupName: 'Action Group',
+ context: {
+ 'rule-type': {
+ contextVar1: 'contextValue1',
+ contextVar2: 'contextValue2',
+ },
+ },
+ id: 'alert-id',
+ },
+ },
+ event: {
+ kind: 'alert',
+ },
+ rule: {
+ id: 'rule-id',
+ name: 'rule-name',
+ params: {
+ 'rule-type': {
+ ruleParam: 1,
+ ruleParamString: 'another param',
+ },
+ },
+ space: 'space-id',
+ type: 'rule-type',
+ },
+ tags: ['abc', 'def'],
+ });
+ });
+});
diff --git a/x-pack/plugins/actions/common/alert_history_schema.ts b/x-pack/plugins/actions/common/alert_history_schema.ts
new file mode 100644
index 0000000000000..e1c923ab23f44
--- /dev/null
+++ b/x-pack/plugins/actions/common/alert_history_schema.ts
@@ -0,0 +1,90 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { isEmpty } from 'lodash';
+
+export const ALERT_HISTORY_PREFIX = 'kibana-alert-history-';
+export const AlertHistoryDefaultIndexName = `${ALERT_HISTORY_PREFIX}default`;
+export const AlertHistoryEsIndexConnectorId = 'preconfigured-alert-history-es-index';
+
+export const buildAlertHistoryDocument = (variables: Record<string, unknown>) => {
+  const { date, alert: alertVariables, context, params, tags, rule: ruleVariables } = variables as {
+    date: string;
+    alert: Record<string, unknown>;
+    context: Record<string, unknown>;
+    params: Record<string, unknown>;
+    rule: Record<string, unknown>;
+    tags: string[];
+  };
+
+ if (!alertVariables || !ruleVariables) {
+ return null;
+ }
+
+ const { actionGroup, actionGroupName, id: alertId } = alertVariables as {
+ actionGroup: string;
+ actionGroupName: string;
+ id: string;
+ };
+
+ const { id: ruleId, name, spaceId, type } = ruleVariables as {
+ id: string;
+ name: string;
+ spaceId: string;
+ type: string;
+ };
+
+ if (!type) {
+ // can't build the document without a type
+ return null;
+ }
+
+ const ruleType = type.replace(/\./g, '__');
+
+ const rule = {
+ ...(ruleId ? { id: ruleId } : {}),
+ ...(name ? { name } : {}),
+ ...(!isEmpty(params) ? { params: { [ruleType]: params } } : {}),
+ ...(spaceId ? { space: spaceId } : {}),
+ ...(type ? { type } : {}),
+ };
+ const alert = {
+ ...(alertId ? { id: alertId } : {}),
+ ...(!isEmpty(context) ? { context: { [ruleType]: context } } : {}),
+ ...(actionGroup ? { actionGroup } : {}),
+ ...(actionGroupName ? { actionGroupName } : {}),
+ };
+
+ const alertHistoryDoc = {
+ '@timestamp': date ? date : new Date().toISOString(),
+ ...(tags && tags.length > 0 ? { tags } : {}),
+ ...(context?.message ? { message: context.message } : {}),
+ ...(!isEmpty(rule) ? { rule } : {}),
+ ...(!isEmpty(alert) ? { kibana: { alert } } : {}),
+ };
+
+ return !isEmpty(alertHistoryDoc) ? { ...alertHistoryDoc, event: { kind: 'alert' } } : null;
+};
+
+export const AlertHistoryDocumentTemplate = Object.freeze(
+ buildAlertHistoryDocument({
+ rule: {
+ id: '{{rule.id}}',
+ name: '{{rule.name}}',
+ type: '{{rule.type}}',
+ spaceId: '{{rule.spaceId}}',
+ },
+ context: '{{context}}',
+ params: '{{params}}',
+ tags: '{{rule.tags}}',
+ alert: {
+ id: '{{alert.id}}',
+ actionGroup: '{{alert.actionGroup}}',
+ actionGroupName: '{{alert.actionGroupName}}',
+ },
+ })
+);
diff --git a/x-pack/plugins/actions/common/index.ts b/x-pack/plugins/actions/common/index.ts
index 184ae9c226b8f..336aa2263af0c 100644
--- a/x-pack/plugins/actions/common/index.ts
+++ b/x-pack/plugins/actions/common/index.ts
@@ -6,7 +6,7 @@
*/
export * from './types';
+export * from './alert_history_schema';
+export * from './rewrite_request_case';
export const BASE_ACTION_API_PATH = '/api/actions';
-
-export * from './rewrite_request_case';
diff --git a/x-pack/plugins/actions/server/actions_client.test.ts b/x-pack/plugins/actions/server/actions_client.test.ts
index 6544a3c426e42..ae7faca1465c7 100644
--- a/x-pack/plugins/actions/server/actions_client.test.ts
+++ b/x-pack/plugins/actions/server/actions_client.test.ts
@@ -405,6 +405,7 @@ describe('create()', () => {
enabled: true,
enabledActionTypes: ['some-not-ignored-action-type'],
allowedHosts: ['*'],
+ preconfiguredAlertHistoryEsIndex: false,
preconfigured: {},
proxyRejectUnauthorizedCertificates: true,
rejectUnauthorized: true,
diff --git a/x-pack/plugins/actions/server/actions_config.test.ts b/x-pack/plugins/actions/server/actions_config.test.ts
index c81f1f4a4bf2e..1b9de0162f340 100644
--- a/x-pack/plugins/actions/server/actions_config.test.ts
+++ b/x-pack/plugins/actions/server/actions_config.test.ts
@@ -18,6 +18,7 @@ const defaultActionsConfig: ActionsConfig = {
enabled: false,
allowedHosts: [],
enabledActionTypes: [],
+ preconfiguredAlertHistoryEsIndex: false,
preconfigured: {},
proxyRejectUnauthorizedCertificates: true,
rejectUnauthorized: true,
diff --git a/x-pack/plugins/actions/server/builtin_action_types/es_index.test.ts b/x-pack/plugins/actions/server/builtin_action_types/es_index.test.ts
index 282ff22f770f0..5c0f720e8c5fc 100644
--- a/x-pack/plugins/actions/server/builtin_action_types/es_index.test.ts
+++ b/x-pack/plugins/actions/server/builtin_action_types/es_index.test.ts
@@ -18,6 +18,7 @@ import {
ESIndexActionType,
ESIndexActionTypeExecutorOptions,
} from './es_index';
+import { AlertHistoryEsIndexConnectorId } from '../../common';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { elasticsearchClientMock } from '../../../../../src/core/server/elasticsearch/client/mocks';
@@ -115,6 +116,7 @@ describe('params validation', () => {
test('params validation succeeds when params is valid', () => {
      const params: Record<string, unknown> = {
documents: [{ rando: 'thing' }],
+ indexOverride: null,
};
expect(validateParams(actionType, params)).toMatchInlineSnapshot(`
Object {
@@ -123,6 +125,7 @@ describe('params validation', () => {
"rando": "thing",
},
],
+ "indexOverride": null,
}
`);
});
@@ -159,6 +162,7 @@ describe('execute()', () => {
config = { index: 'index-value', refresh: false, executionTimeField: null };
params = {
documents: [{ jim: 'bob' }],
+ indexOverride: null,
};
const actionId = 'some-id';
@@ -200,6 +204,7 @@ describe('execute()', () => {
config = { index: 'index-value', executionTimeField: 'field_to_use_for_time', refresh: true };
params = {
documents: [{ jimbob: 'jr' }],
+ indexOverride: null,
};
executorOptions = { actionId, config, secrets, params, services };
@@ -237,6 +242,7 @@ describe('execute()', () => {
config = { index: 'index-value', executionTimeField: null, refresh: false };
params = {
documents: [{ jim: 'bob' }],
+ indexOverride: null,
};
executorOptions = { actionId, config, secrets, params, services };
@@ -270,6 +276,7 @@ describe('execute()', () => {
config = { index: 'index-value', executionTimeField: null, refresh: false };
params = {
documents: [{ a: 1 }, { b: 2 }],
+ indexOverride: null,
};
executorOptions = { actionId, config, secrets, params, services };
@@ -305,12 +312,244 @@ describe('execute()', () => {
`);
});
+ test('renders parameter templates as expected', async () => {
+ expect(actionType.renderParameterTemplates).toBeTruthy();
+ const paramsWithTemplates = {
+ documents: [{ hello: '{{who}}' }],
+ indexOverride: null,
+ };
+ const variables = {
+ who: 'world',
+ };
+ const renderedParams = actionType.renderParameterTemplates!(
+ paramsWithTemplates,
+ variables,
+ 'action-type-id'
+ );
+ expect(renderedParams).toMatchInlineSnapshot(`
+ Object {
+ "documents": Array [
+ Object {
+ "hello": "world",
+ },
+ ],
+ "indexOverride": null,
+ }
+ `);
+ });
+
+ test('ignores indexOverride for generic es index connector', async () => {
+ expect(actionType.renderParameterTemplates).toBeTruthy();
+ const paramsWithTemplates = {
+ documents: [{ hello: '{{who}}' }],
+ indexOverride: 'hello-world',
+ };
+ const variables = {
+ who: 'world',
+ };
+ const renderedParams = actionType.renderParameterTemplates!(
+ paramsWithTemplates,
+ variables,
+ 'action-type-id'
+ );
+ expect(renderedParams).toMatchInlineSnapshot(`
+ Object {
+ "documents": Array [
+ Object {
+ "hello": "world",
+ },
+ ],
+ "indexOverride": null,
+ }
+ `);
+ });
+
+ test('renders parameter templates as expected for preconfigured alert history connector', async () => {
+ expect(actionType.renderParameterTemplates).toBeTruthy();
+ const paramsWithTemplates = {
+ documents: [{ hello: '{{who}}' }],
+ indexOverride: null,
+ };
+ const variables = {
+ date: '2021-01-01T00:00:00.000Z',
+ rule: {
+ id: 'rule-id',
+ name: 'rule-name',
+ type: 'rule-type',
+ },
+ context: {
+ contextVar1: 'contextValue1',
+ contextVar2: 'contextValue2',
+ },
+ params: {
+ ruleParam: 1,
+ ruleParamString: 'another param',
+ },
+ tags: ['abc', 'xyz'],
+ alert: {
+ id: 'alert-id',
+ actionGroup: 'action-group-id',
+ actionGroupName: 'Action Group',
+ },
+ state: {
+ alertStateValue: true,
+ alertStateAnotherValue: 'yes',
+ },
+ };
+ const renderedParams = actionType.renderParameterTemplates!(
+ paramsWithTemplates,
+ variables,
+ AlertHistoryEsIndexConnectorId
+ );
+ expect(renderedParams).toMatchInlineSnapshot(`
+ Object {
+ "documents": Array [
+ Object {
+ "@timestamp": "2021-01-01T00:00:00.000Z",
+ "event": Object {
+ "kind": "alert",
+ },
+ "kibana": Object {
+ "alert": Object {
+ "actionGroup": "action-group-id",
+ "actionGroupName": "Action Group",
+ "context": Object {
+ "rule-type": Object {
+ "contextVar1": "contextValue1",
+ "contextVar2": "contextValue2",
+ },
+ },
+ "id": "alert-id",
+ },
+ },
+ "rule": Object {
+ "id": "rule-id",
+ "name": "rule-name",
+ "params": Object {
+ "rule-type": Object {
+ "ruleParam": 1,
+ "ruleParamString": "another param",
+ },
+ },
+ "type": "rule-type",
+ },
+ "tags": Array [
+ "abc",
+ "xyz",
+ ],
+ },
+ ],
+ "indexOverride": null,
+ }
+ `);
+ });
+
+ test('passes through indexOverride for preconfigured alert history connector', async () => {
+ expect(actionType.renderParameterTemplates).toBeTruthy();
+ const paramsWithTemplates = {
+ documents: [{ hello: '{{who}}' }],
+ indexOverride: 'hello-world',
+ };
+ const variables = {
+ date: '2021-01-01T00:00:00.000Z',
+ rule: {
+ id: 'rule-id',
+ name: 'rule-name',
+ type: 'rule-type',
+ },
+ context: {
+ contextVar1: 'contextValue1',
+ contextVar2: 'contextValue2',
+ },
+ params: {
+ ruleParam: 1,
+ ruleParamString: 'another param',
+ },
+ tags: ['abc', 'xyz'],
+ alert: {
+ id: 'alert-id',
+ actionGroup: 'action-group-id',
+ actionGroupName: 'Action Group',
+ },
+ state: {
+ alertStateValue: true,
+ alertStateAnotherValue: 'yes',
+ },
+ };
+ const renderedParams = actionType.renderParameterTemplates!(
+ paramsWithTemplates,
+ variables,
+ AlertHistoryEsIndexConnectorId
+ );
+ expect(renderedParams).toMatchInlineSnapshot(`
+ Object {
+ "documents": Array [
+ Object {
+ "@timestamp": "2021-01-01T00:00:00.000Z",
+ "event": Object {
+ "kind": "alert",
+ },
+ "kibana": Object {
+ "alert": Object {
+ "actionGroup": "action-group-id",
+ "actionGroupName": "Action Group",
+ "context": Object {
+ "rule-type": Object {
+ "contextVar1": "contextValue1",
+ "contextVar2": "contextValue2",
+ },
+ },
+ "id": "alert-id",
+ },
+ },
+ "rule": Object {
+ "id": "rule-id",
+ "name": "rule-name",
+ "params": Object {
+ "rule-type": Object {
+ "ruleParam": 1,
+ "ruleParamString": "another param",
+ },
+ },
+ "type": "rule-type",
+ },
+ "tags": Array [
+ "abc",
+ "xyz",
+ ],
+ },
+ ],
+ "indexOverride": "hello-world",
+ }
+ `);
+ });
+
+ test('throws error for preconfigured alert history index when no variables are available', async () => {
+ expect(actionType.renderParameterTemplates).toBeTruthy();
+ const paramsWithTemplates = {
+ documents: [{ hello: '{{who}}' }],
+ indexOverride: null,
+ };
+ const variables = {};
+
+ expect(() =>
+ actionType.renderParameterTemplates!(
+ paramsWithTemplates,
+ variables,
+ AlertHistoryEsIndexConnectorId
+ )
+ ).toThrowErrorMatchingInlineSnapshot(
+ `"error creating alert history document for ${AlertHistoryEsIndexConnectorId} connector"`
+ );
+ });
+
test('resolves with an error when an error occurs in the indexing operation', async () => {
const secrets = {};
// minimal params
const config = { index: 'index-value', refresh: false, executionTimeField: null };
const params = {
documents: [{ '': 'bob' }],
+ indexOverride: null,
};
const actionId = 'some-id';
diff --git a/x-pack/plugins/actions/server/builtin_action_types/es_index.ts b/x-pack/plugins/actions/server/builtin_action_types/es_index.ts
index f7b0e7de478d8..3662fea00e31d 100644
--- a/x-pack/plugins/actions/server/builtin_action_types/es_index.ts
+++ b/x-pack/plugins/actions/server/builtin_action_types/es_index.ts
@@ -8,9 +8,11 @@
import { curry, find } from 'lodash';
import { i18n } from '@kbn/i18n';
import { schema, TypeOf } from '@kbn/config-schema';
-
import { Logger } from '../../../../../src/core/server';
import { ActionType, ActionTypeExecutorOptions, ActionTypeExecutorResult } from '../types';
+import { renderMustacheObject } from '../lib/mustache_renderer';
+import { buildAlertHistoryDocument, AlertHistoryEsIndexConnectorId } from '../../common';
+import { ALERT_HISTORY_PREFIX } from '../../common/alert_history_schema';
export type ESIndexActionType = ActionType;
export type ESIndexActionTypeExecutorOptions = ActionTypeExecutorOptions<
@@ -38,6 +40,15 @@ export type ActionParamsType = TypeOf;
// eventually: https://github.com/elastic/kibana/projects/26#card-24087404
const ParamsSchema = schema.object({
documents: schema.arrayOf(schema.recordOf(schema.string(), schema.any())),
+ indexOverride: schema.nullable(
+ schema.string({
+ validate: (pattern) => {
+ if (!pattern.startsWith(ALERT_HISTORY_PREFIX)) {
+ return `index must start with "${ALERT_HISTORY_PREFIX}"`;
+ }
+ },
+ })
+ ),
});
export const ActionTypeId = '.index';
@@ -54,6 +65,7 @@ export function getActionType({ logger }: { logger: Logger }): ESIndexActionType
params: ParamsSchema,
},
executor: curry(executor)({ logger }),
+ renderParameterTemplates,
};
}
@@ -68,7 +80,7 @@ async function executor(
const params = execOptions.params;
const services = execOptions.services;
- const index = config.index;
+ const index = params.indexOverride || config.index;
const bulkBody = [];
for (const document of params.documents) {
@@ -107,6 +119,24 @@ async function executor(
}
}
+function renderParameterTemplates(
+ params: ActionParamsType,
+  variables: Record<string, unknown>,
+ actionId: string
+): ActionParamsType {
+ const { documents, indexOverride } = renderMustacheObject(params, variables);
+
+ if (actionId === AlertHistoryEsIndexConnectorId) {
+ const alertHistoryDoc = buildAlertHistoryDocument(variables);
+ if (!alertHistoryDoc) {
+ throw new Error(`error creating alert history document for ${actionId} connector`);
+ }
+ return { documents: [alertHistoryDoc], indexOverride };
+ }
+
+ return { documents, indexOverride: null };
+}
+
function wrapErr(
errMessage: string,
actionId: string,
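
The es_index change above adds an optional `indexOverride` param: the generic `.index` connector always ignores it, while the preconfigured alert history connector may use it to redirect documents, and the schema rejects any override that does not carry the alert history prefix. A minimal sketch of how that validation behaves, assuming `ALERT_HISTORY_PREFIX` resolves to `kibana-alert-history-` (as the `${ALERT_HISTORY_PREFIX}template` name in the index template tests below suggests):

import { schema } from '@kbn/config-schema';

// Simplified stand-in for the ParamsSchema added above (illustrative only).
const ALERT_HISTORY_PREFIX = 'kibana-alert-history-'; // assumed value
const ParamsSchema = schema.object({
  documents: schema.arrayOf(schema.recordOf(schema.string(), schema.any())),
  indexOverride: schema.nullable(
    schema.string({
      validate: (pattern) =>
        pattern.startsWith(ALERT_HISTORY_PREFIX)
          ? undefined
          : `index must start with "${ALERT_HISTORY_PREFIX}"`,
    })
  ),
});

// Accepted: the override targets an alert history index.
ParamsSchema.validate({
  documents: [{ message: 'hello' }],
  indexOverride: 'kibana-alert-history-my-space',
});

// Throws: the override does not start with the alert history prefix.
ParamsSchema.validate({
  documents: [{ message: 'hello' }],
  indexOverride: 'some-other-index',
});
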
diff --git a/x-pack/plugins/actions/server/config.test.ts b/x-pack/plugins/actions/server/config.test.ts
index 2eecaa19da0c5..ad598bffe04b4 100644
--- a/x-pack/plugins/actions/server/config.test.ts
+++ b/x-pack/plugins/actions/server/config.test.ts
@@ -31,6 +31,7 @@ describe('config validation', () => {
"valueInBytes": 1048576,
},
"preconfigured": Object {},
+ "preconfiguredAlertHistoryEsIndex": false,
"proxyRejectUnauthorizedCertificates": true,
"rejectUnauthorized": true,
"responseTimeout": "PT1M",
@@ -74,6 +75,7 @@ describe('config validation', () => {
"secrets": Object {},
},
},
+ "preconfiguredAlertHistoryEsIndex": false,
"proxyRejectUnauthorizedCertificates": false,
"rejectUnauthorized": false,
"responseTimeout": "PT1M",
diff --git a/x-pack/plugins/actions/server/config.ts b/x-pack/plugins/actions/server/config.ts
index 4aa77ded315b8..36948478816c9 100644
--- a/x-pack/plugins/actions/server/config.ts
+++ b/x-pack/plugins/actions/server/config.ts
@@ -37,6 +37,7 @@ export const configSchema = schema.object({
defaultValue: [AllowedHosts.Any],
}
),
+ preconfiguredAlertHistoryEsIndex: schema.boolean({ defaultValue: false }),
preconfigured: schema.recordOf(schema.string(), preconfiguredActionSchema, {
defaultValue: {},
validate: validatePreconfigured,
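
The new `preconfiguredAlertHistoryEsIndex` flag defaults to `false`, so the preconfigured alert history connector stays opt-in; in `kibana.yml` this corresponds to `xpack.actions.preconfiguredAlertHistoryEsIndex` (assuming the usual `xpack.actions` namespace for this plugin's settings). A quick sketch against the schema, assuming — as the defaults snapshot in config.test.ts above suggests — that all other settings have defaults:

// Illustrative only: defaults keep the preconfigured alert history connector disabled.
const defaults = configSchema.validate({});
// defaults.preconfiguredAlertHistoryEsIndex === false

// Explicitly enabling it is a one-line config change.
const enabled = configSchema.validate({ preconfiguredAlertHistoryEsIndex: true });
// enabled.preconfiguredAlertHistoryEsIndex === true
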
diff --git a/x-pack/plugins/actions/server/mocks.ts b/x-pack/plugins/actions/server/mocks.ts
index ab29f524c202d..4d32c2e2bf16d 100644
--- a/x-pack/plugins/actions/server/mocks.ts
+++ b/x-pack/plugins/actions/server/mocks.ts
@@ -40,10 +40,11 @@ const createStartMock = () => {
// this is a default renderer that escapes nothing
export function renderActionParameterTemplatesDefault(
actionTypeId: string,
+ actionId: string,
  params: Record<string, unknown>,
  variables: Record<string, unknown>
) {
- return renderActionParameterTemplates(undefined, actionTypeId, params, variables);
+ return renderActionParameterTemplates(undefined, actionTypeId, actionId, params, variables);
}
const createServicesMock = () => {
diff --git a/x-pack/plugins/actions/server/plugin.test.ts b/x-pack/plugins/actions/server/plugin.test.ts
index 30bbedbedbe9c..3485891a01267 100644
--- a/x-pack/plugins/actions/server/plugin.test.ts
+++ b/x-pack/plugins/actions/server/plugin.test.ts
@@ -23,6 +23,7 @@ import {
ActionsPluginsStart,
PluginSetupContract,
} from './plugin';
+import { AlertHistoryEsIndexConnectorId } from '../common';
describe('Actions Plugin', () => {
describe('setup()', () => {
@@ -36,6 +37,7 @@ describe('Actions Plugin', () => {
enabled: true,
enabledActionTypes: ['*'],
allowedHosts: ['*'],
+ preconfiguredAlertHistoryEsIndex: false,
preconfigured: {},
proxyRejectUnauthorizedCertificates: true,
rejectUnauthorized: true,
@@ -180,6 +182,7 @@ describe('Actions Plugin', () => {
});
describe('start()', () => {
+ let context: PluginInitializerContext;
let plugin: ActionsPlugin;
let coreSetup: ReturnType;
let coreStart: ReturnType;
@@ -187,10 +190,11 @@ describe('Actions Plugin', () => {
let pluginsStart: jest.Mocked;
beforeEach(() => {
- const context = coreMock.createPluginInitializerContext({
+ context = coreMock.createPluginInitializerContext({
enabled: true,
enabledActionTypes: ['*'],
allowedHosts: ['*'],
+ preconfiguredAlertHistoryEsIndex: false,
preconfigured: {
preconfiguredServerLog: {
actionTypeId: '.server-log',
@@ -223,15 +227,6 @@ describe('Actions Plugin', () => {
});
describe('getActionsClientWithRequest()', () => {
- it('should handle preconfigured actions', async () => {
- // coreMock.createSetup doesn't support Plugin generics
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- await plugin.setup(coreSetup as any, pluginsSetup);
- const pluginStart = await plugin.start(coreStart, pluginsStart);
-
- expect(pluginStart.isActionExecutable('preconfiguredServerLog', '.server-log')).toBe(true);
- });
-
it('should not throw error when ESO plugin has encryption key', async () => {
await plugin.setup(coreSetup, {
...pluginsSetup,
@@ -258,6 +253,99 @@ describe('Actions Plugin', () => {
});
});
+ describe('Preconfigured connectors', () => {
+ function getConfig(overrides = {}) {
+ return {
+ enabled: true,
+ enabledActionTypes: ['*'],
+ allowedHosts: ['*'],
+ preconfiguredAlertHistoryEsIndex: false,
+ preconfigured: {
+ preconfiguredServerLog: {
+ actionTypeId: '.server-log',
+ name: 'preconfigured-server-log',
+ config: {},
+ secrets: {},
+ },
+ },
+ proxyRejectUnauthorizedCertificates: true,
+ proxyBypassHosts: undefined,
+ proxyOnlyHosts: undefined,
+ rejectUnauthorized: true,
+ maxResponseContentLength: new ByteSizeValue(1000000),
+ responseTimeout: moment.duration('60s'),
+ ...overrides,
+ };
+ }
+
+ function setup(config: ActionsConfig) {
+ context = coreMock.createPluginInitializerContext(config);
+ plugin = new ActionsPlugin(context);
+ coreSetup = coreMock.createSetup();
+ coreStart = coreMock.createStart();
+ pluginsSetup = {
+ taskManager: taskManagerMock.createSetup(),
+ encryptedSavedObjects: encryptedSavedObjectsMock.createSetup(),
+ licensing: licensingMock.createSetup(),
+ eventLog: eventLogMock.createSetup(),
+ usageCollection: usageCollectionPluginMock.createSetupContract(),
+ features: featuresPluginMock.createSetup(),
+ };
+ pluginsStart = {
+ licensing: licensingMock.createStart(),
+ taskManager: taskManagerMock.createStart(),
+ encryptedSavedObjects: encryptedSavedObjectsMock.createStart(),
+ };
+ }
+
+ it('should handle preconfigured actions', async () => {
+ setup(getConfig());
+ // coreMock.createSetup doesn't support Plugin generics
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ await plugin.setup(coreSetup as any, pluginsSetup);
+ const pluginStart = await plugin.start(coreStart, pluginsStart);
+
+ expect(pluginStart.preconfiguredActions.length).toEqual(1);
+ expect(pluginStart.isActionExecutable('preconfiguredServerLog', '.server-log')).toBe(true);
+ });
+
+ it('should handle preconfiguredAlertHistoryEsIndex = true', async () => {
+ setup(getConfig({ preconfiguredAlertHistoryEsIndex: true }));
+
+ await plugin.setup(coreSetup, pluginsSetup);
+ const pluginStart = await plugin.start(coreStart, pluginsStart);
+
+ expect(pluginStart.preconfiguredActions.length).toEqual(2);
+ expect(
+ pluginStart.isActionExecutable('preconfigured-alert-history-es-index', '.index')
+ ).toBe(true);
+ });
+
+ it('should not allow preconfigured connector with same ID as AlertHistoryEsIndexConnectorId', async () => {
+ setup(
+ getConfig({
+ preconfigured: {
+ [AlertHistoryEsIndexConnectorId]: {
+ actionTypeId: '.index',
+ name: 'clashing preconfigured index connector',
+ config: {},
+ secrets: {},
+ },
+ },
+ })
+ );
+ // coreMock.createSetup doesn't support Plugin generics
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ await plugin.setup(coreSetup as any, pluginsSetup);
+ const pluginStart = await plugin.start(coreStart, pluginsStart);
+
+ expect(pluginStart.preconfiguredActions.length).toEqual(0);
+ expect(context.logger.get().warn).toHaveBeenCalledWith(
+ `Preconfigured connectors cannot have the id "${AlertHistoryEsIndexConnectorId}" because this is a reserved id.`
+ );
+ });
+ });
+
describe('isActionTypeEnabled()', () => {
const actionType: ActionType = {
id: 'my-action-type',
diff --git a/x-pack/plugins/actions/server/plugin.ts b/x-pack/plugins/actions/server/plugin.ts
index bfe3b0a09ff2e..3c754d90c4af5 100644
--- a/x-pack/plugins/actions/server/plugin.ts
+++ b/x-pack/plugins/actions/server/plugin.ts
@@ -68,6 +68,9 @@ import {
} from './authorization/get_authorization_mode_by_source';
import { ensureSufficientLicense } from './lib/ensure_sufficient_license';
import { renderMustacheObject } from './lib/mustache_renderer';
+import { getAlertHistoryEsIndex } from './preconfigured_connectors/alert_history_es_index/alert_history_es_index';
+import { createAlertHistoryIndexTemplate } from './preconfigured_connectors/alert_history_es_index/create_alert_history_index_template';
+import { AlertHistoryEsIndexConnectorId } from '../common';
const EVENT_LOG_PROVIDER = 'actions';
export const EVENT_LOG_ACTIONS = {
@@ -98,6 +101,7 @@ export interface PluginStartContract {
preconfiguredActions: PreConfiguredAction[];
renderActionParameterTemplates(
actionTypeId: string,
+ actionId: string,
params: Params,
    variables: Record<string, unknown>
): Params;
@@ -178,12 +182,22 @@ export class ActionsPlugin implements Plugin {
return this.actionTypeRegistry!.isActionTypeEnabled(id, options);
@@ -468,12 +489,13 @@ export class ActionsPlugin implements Plugin(
actionTypeRegistry: ActionTypeRegistry | undefined,
actionTypeId: string,
+ actionId: string,
params: Params,
  variables: Record<string, unknown>
): Params {
const actionType = actionTypeRegistry?.get(actionTypeId);
if (actionType?.renderParameterTemplates) {
- return actionType.renderParameterTemplates(params, variables) as Params;
+ return actionType.renderParameterTemplates(params, variables, actionId) as Params;
} else {
return renderMustacheObject(params, variables);
}
diff --git a/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/alert_history_es_index.ts b/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/alert_history_es_index.ts
new file mode 100644
index 0000000000000..38556591c4ea2
--- /dev/null
+++ b/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/alert_history_es_index.ts
@@ -0,0 +1,26 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { i18n } from '@kbn/i18n';
+import { PreConfiguredAction } from '../../types';
+import { ActionTypeId as EsIndexActionTypeId } from '../../builtin_action_types/es_index';
+import { AlertHistoryEsIndexConnectorId, AlertHistoryDefaultIndexName } from '../../../common';
+
+export function getAlertHistoryEsIndex(): Readonly<PreConfiguredAction> {
+ return Object.freeze({
+ name: i18n.translate('xpack.actions.alertHistoryEsIndexConnector.name', {
+ defaultMessage: 'Alert history Elasticsearch index',
+ }),
+ actionTypeId: EsIndexActionTypeId,
+ id: AlertHistoryEsIndexConnectorId,
+ isPreconfigured: true,
+ config: {
+ index: AlertHistoryDefaultIndexName,
+ },
+ secrets: {},
+ });
+}
diff --git a/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/create_alert_history_index_template.test.ts b/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/create_alert_history_index_template.test.ts
new file mode 100644
index 0000000000000..a7038d8dc62eb
--- /dev/null
+++ b/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/create_alert_history_index_template.test.ts
@@ -0,0 +1,52 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { ElasticsearchClient } from 'src/core/server';
+import { elasticsearchServiceMock, loggingSystemMock } from 'src/core/server/mocks';
+import { DeeplyMockedKeys } from '@kbn/utility-types/jest';
+import {
+ createAlertHistoryIndexTemplate,
+ getAlertHistoryIndexTemplate,
+} from './create_alert_history_index_template';
+
+type MockedLogger = ReturnType<typeof loggingSystemMock.createLogger>;
+
+describe('createAlertHistoryIndexTemplate', () => {
+ let logger: MockedLogger;
+  let clusterClient: DeeplyMockedKeys<ElasticsearchClient>;
+
+ beforeEach(() => {
+ logger = loggingSystemMock.createLogger();
+ clusterClient = elasticsearchServiceMock.createClusterClient().asInternalUser;
+ });
+
+ test(`should create index template if it doesn't exist`, async () => {
+ // Response type for existsIndexTemplate is still TODO
+ clusterClient.indices.existsIndexTemplate.mockResolvedValue({
+ body: false,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ } as any);
+
+ await createAlertHistoryIndexTemplate({ client: clusterClient, logger });
+ expect(clusterClient.indices.putIndexTemplate).toHaveBeenCalledWith({
+ name: `kibana-alert-history-template`,
+ body: getAlertHistoryIndexTemplate(),
+ create: true,
+ });
+ });
+
+ test(`shouldn't create index template if it already exists`, async () => {
+ // Response type for existsIndexTemplate is still TODO
+ clusterClient.indices.existsIndexTemplate.mockResolvedValue({
+ body: true,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ } as any);
+
+ await createAlertHistoryIndexTemplate({ client: clusterClient, logger });
+ expect(clusterClient.indices.putIndexTemplate).not.toHaveBeenCalled();
+ });
+});
diff --git a/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/create_alert_history_index_template.ts b/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/create_alert_history_index_template.ts
new file mode 100644
index 0000000000000..fe9874fb1d671
--- /dev/null
+++ b/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/create_alert_history_index_template.ts
@@ -0,0 +1,106 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { ElasticsearchClient, Logger } from 'src/core/server';
+import { ALERT_HISTORY_PREFIX } from '../../../common';
+import mappings from './mappings.json';
+
+export function getAlertHistoryIndexTemplate() {
+ return {
+ index_patterns: [`${ALERT_HISTORY_PREFIX}*`],
+ _meta: {
+ description:
+ 'System generated mapping for preconfigured alert history Elasticsearch index connector.',
+ },
+ template: {
+ settings: {
+ number_of_shards: 1,
+ auto_expand_replicas: '0-1',
+ },
+ mappings,
+ },
+ };
+}
+
+async function doesIndexTemplateExist({
+ client,
+ templateName,
+}: {
+ client: ElasticsearchClient;
+ templateName: string;
+}) {
+ let result;
+ try {
+ result = (await client.indices.existsIndexTemplate({ name: templateName })).body;
+ } catch (err) {
+ throw new Error(`error checking existence of index template: ${err.message}`);
+ }
+
+ return result;
+}
+
+async function createIndexTemplate({
+ client,
+ template,
+ templateName,
+}: {
+ client: ElasticsearchClient;
+  template: Record<string, unknown>;
+ templateName: string;
+}) {
+ try {
+ await client.indices.putIndexTemplate({
+ name: templateName,
+ body: template,
+ create: true,
+ });
+ } catch (err) {
+    // The error message doesn't have a type attribute we can look at to guarantee it's due
+    // to the template already existing (only a long message), so we'll check ourselves to see
+    // if the template now exists. This scenario would happen if you start up multiple Kibana
+    // instances at the same time.
+ const existsNow = await doesIndexTemplateExist({ client, templateName });
+ if (!existsNow) {
+ throw new Error(`error creating index template: ${err.message}`);
+ }
+ }
+}
+
+async function createIndexTemplateIfNotExists({
+ client,
+ template,
+ templateName,
+}: {
+ client: ElasticsearchClient;
+  template: Record<string, unknown>;
+ templateName: string;
+}) {
+ const indexTemplateExists = await doesIndexTemplateExist({ client, templateName });
+
+ if (!indexTemplateExists) {
+ await createIndexTemplate({ client, template, templateName });
+ }
+}
+
+export async function createAlertHistoryIndexTemplate({
+ client,
+ logger,
+}: {
+ client: ElasticsearchClient;
+ logger: Logger;
+}) {
+ try {
+ const indexTemplate = getAlertHistoryIndexTemplate();
+ await createIndexTemplateIfNotExists({
+ client,
+ templateName: `${ALERT_HISTORY_PREFIX}template`,
+ template: indexTemplate,
+ });
+ } catch (err) {
+ logger.error(`Could not initialize alert history index with mappings: ${err.message}.`);
+ }
+}
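
createAlertHistoryIndexTemplate is written to be idempotent: it checks whether the template exists first, and if the create call still races with another Kibana instance it re-checks before treating the failure as an error, so calling it on every startup is safe. A sketch of how it could be invoked from the actions plugin once an Elasticsearch client is available; the helper name and wiring below are assumptions, since the corresponding plugin.ts hunk is not fully shown in this diff:

import { CoreStart, Logger } from 'src/core/server';
import { createAlertHistoryIndexTemplate } from './create_alert_history_index_template';

// Hypothetical helper: ensure the alert history index template exists before the
// preconfigured connector writes its first document. Errors are logged, not thrown,
// by createAlertHistoryIndexTemplate itself.
export async function ensureAlertHistoryIndexTemplate(core: CoreStart, logger: Logger) {
  await createAlertHistoryIndexTemplate({
    client: core.elasticsearch.client.asInternalUser,
    logger,
  });
}
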
diff --git a/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/mappings.json b/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/mappings.json
new file mode 100644
index 0000000000000..56047f30d9489
--- /dev/null
+++ b/x-pack/plugins/actions/server/preconfigured_connectors/alert_history_es_index/mappings.json
@@ -0,0 +1,84 @@
+{
+ "dynamic": "false",
+ "properties": {
+ "@timestamp": {
+ "type": "date"
+ },
+ "kibana": {
+ "properties": {
+ "alert": {
+ "properties": {
+ "actionGroup": {
+ "type": "keyword"
+ },
+ "actionGroupName": {
+ "type": "keyword"
+ },
+ "actionSubgroup": {
+ "type": "keyword"
+ },
+ "context": {
+ "type": "object",
+ "enabled": false
+ },
+ "id": {
+ "type": "keyword"
+ }
+ }
+ }
+ }
+ },
+ "tags": {
+ "ignore_above": 1024,
+ "type": "keyword",
+ "meta": {
+ "isArray": "true"
+ }
+ },
+ "message": {
+ "norms": false,
+ "type": "text"
+ },
+ "event": {
+ "properties": {
+ "kind": {
+ "type": "keyword"
+ }
+ }
+ },
+ "rule": {
+ "properties": {
+ "author": {
+ "type": "keyword"
+ },
+ "category": {
+ "type": "keyword"
+ },
+ "id": {
+ "type": "keyword"
+ },
+ "license": {
+ "type": "keyword"
+ },
+ "name": {
+ "type": "text",
+ "fields": {
+ "keyword": {
+ "type": "keyword"
+ }
+ }
+ },
+ "params": {
+ "type": "object",
+ "enabled": false
+ },
+ "space": {
+ "type": "keyword"
+ },
+ "type": {
+ "type": "keyword"
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/x-pack/plugins/actions/server/types.ts b/x-pack/plugins/actions/server/types.ts
index b7a6750a520ea..d6f99a766ed34 100644
--- a/x-pack/plugins/actions/server/types.ts
+++ b/x-pack/plugins/actions/server/types.ts
@@ -107,7 +107,11 @@ export interface ActionType<
config?: ValidatorType;
secrets?: ValidatorType;
};
-  renderParameterTemplates?(params: Params, variables: Record<string, unknown>): Params;
+ renderParameterTemplates?(
+ params: Params,
+    variables: Record<string, unknown>,
+ actionId?: string
+ ): Params;
executor: ExecutorType;
}
diff --git a/x-pack/plugins/alerting/server/task_runner/create_execution_handler.ts b/x-pack/plugins/alerting/server/task_runner/create_execution_handler.ts
index 9999ea6a4d3d7..2ecf540485695 100644
--- a/x-pack/plugins/alerting/server/task_runner/create_execution_handler.ts
+++ b/x-pack/plugins/alerting/server/task_runner/create_execution_handler.ts
@@ -117,6 +117,7 @@ export function createExecutionHandler<
params: transformActionParams({
actionsPlugin,
alertId,
+ alertType: alertType.id,
actionTypeId: action.actionTypeId,
alertName,
spaceId,
@@ -127,6 +128,7 @@ export function createExecutionHandler<
alertActionSubgroup: actionSubgroup,
context,
actionParams: action.params,
+ actionId: action.id,
state,
kibanaBaseUrl,
alertParams,
diff --git a/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts b/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts
index a3a7e9bbd9da5..50d710f6d6b14 100644
--- a/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts
+++ b/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts
@@ -153,7 +153,7 @@ describe('Task Runner', () => {
actionsClient
);
taskRunnerFactoryInitializerParams.actionsPlugin.renderActionParameterTemplates.mockImplementation(
- (actionTypeId, params) => params
+ (actionTypeId, actionId, params) => params
);
});
diff --git a/x-pack/plugins/alerting/server/task_runner/transform_action_params.test.ts b/x-pack/plugins/alerting/server/task_runner/transform_action_params.test.ts
index 6379192e855d7..e325d597da145 100644
--- a/x-pack/plugins/alerting/server/task_runner/transform_action_params.test.ts
+++ b/x-pack/plugins/alerting/server/task_runner/transform_action_params.test.ts
@@ -34,6 +34,8 @@ test('skips non string parameters', () => {
context: {},
state: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -68,6 +70,8 @@ test('missing parameters get emptied out', () => {
context: {},
state: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -95,6 +99,8 @@ test('context parameters are passed to templates', () => {
state: {},
context: { foo: 'fooVal' },
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -121,6 +127,8 @@ test('state parameters are passed to templates', () => {
state: { bar: 'barVal' },
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -147,6 +155,8 @@ test('alertId is passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -173,6 +183,8 @@ test('alertName is passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -199,6 +211,8 @@ test('tags is passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -225,6 +239,8 @@ test('undefined tags is passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
spaceId: 'spaceId-A',
alertInstanceId: '2',
@@ -250,6 +266,8 @@ test('empty tags is passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: [],
spaceId: 'spaceId-A',
@@ -276,6 +294,8 @@ test('spaceId is passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -302,6 +322,8 @@ test('alertInstanceId is passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -328,6 +350,8 @@ test('alertActionGroup is passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -354,6 +378,8 @@ test('alertActionGroupName is passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -380,6 +406,8 @@ test('rule variables are passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -408,6 +436,8 @@ test('rule alert variables are passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -436,6 +466,8 @@ test('date is passed to templates', () => {
state: {},
context: {},
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -464,6 +496,8 @@ test('works recursively', () => {
state: { value: 'state' },
context: { value: 'context' },
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
@@ -494,6 +528,8 @@ test('works recursively with arrays', () => {
state: { value: 'state' },
context: { value: 'context' },
alertId: '1',
+ alertType: 'rule-type-id',
+ actionId: 'action-id',
alertName: 'alert-name',
tags: ['tag-A', 'tag-B'],
spaceId: 'spaceId-A',
diff --git a/x-pack/plugins/alerting/server/task_runner/transform_action_params.ts b/x-pack/plugins/alerting/server/task_runner/transform_action_params.ts
index 348bf01ea874b..3f9fe9e9c59e0 100644
--- a/x-pack/plugins/alerting/server/task_runner/transform_action_params.ts
+++ b/x-pack/plugins/alerting/server/task_runner/transform_action_params.ts
@@ -16,6 +16,8 @@ import { PluginStartContract as ActionsPluginStartContract } from '../../../acti
interface TransformActionParamsOptions {
actionsPlugin: ActionsPluginStartContract;
alertId: string;
+ alertType: string;
+ actionId: string;
actionTypeId: string;
alertName: string;
spaceId: string;
@@ -34,6 +36,8 @@ interface TransformActionParamsOptions {
export function transformActionParams({
actionsPlugin,
alertId,
+ alertType,
+ actionId,
actionTypeId,
alertName,
spaceId,
@@ -68,6 +72,7 @@ export function transformActionParams({
rule: {
id: alertId,
name: alertName,
+ type: alertType,
spaceId,
tags,
},
@@ -78,5 +83,10 @@ export function transformActionParams({
actionSubgroup: alertActionSubgroup,
},
};
- return actionsPlugin.renderActionParameterTemplates(actionTypeId, actionParams, variables);
+ return actionsPlugin.renderActionParameterTemplates(
+ actionTypeId,
+ actionId,
+ actionParams,
+ variables
+ );
}
diff --git a/x-pack/plugins/apm/common/environment_filter_values.ts b/x-pack/plugins/apm/common/environment_filter_values.ts
index e091b53b2e5b8..c80541ee1ba6b 100644
--- a/x-pack/plugins/apm/common/environment_filter_values.ts
+++ b/x-pack/plugins/apm/common/environment_filter_values.ts
@@ -22,11 +22,13 @@ const environmentLabels: Record = {
};
export const ENVIRONMENT_ALL = {
+ esFieldValue: undefined,
value: ENVIRONMENT_ALL_VALUE,
text: environmentLabels[ENVIRONMENT_ALL_VALUE],
};
export const ENVIRONMENT_NOT_DEFINED = {
+ esFieldValue: undefined,
value: ENVIRONMENT_NOT_DEFINED_VALUE,
text: environmentLabels[ENVIRONMENT_NOT_DEFINED_VALUE],
};
@@ -35,6 +37,22 @@ export function getEnvironmentLabel(environment: string) {
return environmentLabels[environment] || environment;
}
+export function parseEnvironmentUrlParam(environment: string) {
+ if (environment === ENVIRONMENT_ALL_VALUE) {
+ return ENVIRONMENT_ALL;
+ }
+
+ if (environment === ENVIRONMENT_NOT_DEFINED_VALUE) {
+ return ENVIRONMENT_NOT_DEFINED;
+ }
+
+ return {
+ esFieldValue: environment,
+ value: environment,
+ text: environment,
+ };
+}
+
// returns the environment url param that should be used
// based on the requested environment. If the requested
// environment is different from the URL parameter, we'll
diff --git a/x-pack/plugins/apm/common/latency_aggregation_types.ts b/x-pack/plugins/apm/common/latency_aggregation_types.ts
index d9db58f223144..964d6f4ed1015 100644
--- a/x-pack/plugins/apm/common/latency_aggregation_types.ts
+++ b/x-pack/plugins/apm/common/latency_aggregation_types.ts
@@ -14,7 +14,7 @@ export enum LatencyAggregationType {
}
export const latencyAggregationTypeRt = t.union([
- t.literal('avg'),
- t.literal('p95'),
- t.literal('p99'),
+ t.literal(LatencyAggregationType.avg),
+ t.literal(LatencyAggregationType.p95),
+ t.literal(LatencyAggregationType.p99),
]);
diff --git a/x-pack/plugins/apm/common/runtime_types/iso_to_epoch_rt/index.ts b/x-pack/plugins/apm/common/runtime_types/iso_to_epoch_rt/index.ts
index 1a17f82a52141..970e39bc4f86f 100644
--- a/x-pack/plugins/apm/common/runtime_types/iso_to_epoch_rt/index.ts
+++ b/x-pack/plugins/apm/common/runtime_types/iso_to_epoch_rt/index.ts
@@ -21,8 +21,5 @@ export const isoToEpochRt = new t.Type(
? t.failure(input, context)
: t.success(epochDate);
}),
- (a) => {
- const d = new Date(a);
- return d.toISOString();
- }
+ (output) => new Date(output).toISOString()
);
diff --git a/x-pack/plugins/apm/kibana.json b/x-pack/plugins/apm/kibana.json
index e340f8bf19126..28e4a7b36e740 100644
--- a/x-pack/plugins/apm/kibana.json
+++ b/x-pack/plugins/apm/kibana.json
@@ -9,7 +9,8 @@
"licensing",
"triggersActionsUi",
"embeddable",
- "infra"
+ "infra",
+ "observability"
],
"optionalPlugins": [
"spaces",
@@ -18,7 +19,6 @@
"taskManager",
"actions",
"alerting",
- "observability",
"security",
"ml",
"home",
diff --git a/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/CreateEditCustomLinkFlyout/link_preview.test.tsx b/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/CreateEditCustomLinkFlyout/link_preview.test.tsx
index 6a6db40892e10..407f460f25ad3 100644
--- a/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/CreateEditCustomLinkFlyout/link_preview.test.tsx
+++ b/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/CreateEditCustomLinkFlyout/link_preview.test.tsx
@@ -14,15 +14,18 @@ import {
act,
waitFor,
} from '@testing-library/react';
-import * as apmApi from '../../../../../../services/rest/createCallApmApi';
+import {
+ getCallApmApiSpy,
+ CallApmApiSpy,
+} from '../../../../../../services/rest/callApmApiSpy';
export const removeExternalLinkText = (str: string) =>
str.replace(/\(opens in a new tab or window\)/g, '');
describe('LinkPreview', () => {
- let callApmApiSpy: jest.SpyInstance;
+ let callApmApiSpy: CallApmApiSpy;
beforeAll(() => {
- callApmApiSpy = jest.spyOn(apmApi, 'callApmApi').mockResolvedValue({
+ callApmApiSpy = getCallApmApiSpy().mockResolvedValue({
transaction: { id: 'foo' },
});
});
diff --git a/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx b/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx
index 77835afef863a..7d119b8c406da 100644
--- a/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx
+++ b/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx
@@ -8,6 +8,7 @@
import { fireEvent, render, RenderResult } from '@testing-library/react';
import React from 'react';
import { act } from 'react-dom/test-utils';
+import { getCallApmApiSpy } from '../../../../../services/rest/callApmApiSpy';
import { CustomLinkOverview } from '.';
import { License } from '../../../../../../../licensing/common/license';
import { ApmPluginContextValue } from '../../../../../context/apm_plugin/apm_plugin_context';
@@ -17,7 +18,6 @@ import {
} from '../../../../../context/apm_plugin/mock_apm_plugin_context';
import { LicenseContext } from '../../../../../context/license/license_context';
import * as hooks from '../../../../../hooks/use_fetcher';
-import * as apmApi from '../../../../../services/rest/createCallApmApi';
import {
expectTextsInDocument,
expectTextsNotInDocument,
@@ -43,7 +43,7 @@ function getMockAPMContext({ canSave }: { canSave: boolean }) {
describe('CustomLink', () => {
beforeAll(() => {
- jest.spyOn(apmApi, 'callApmApi').mockResolvedValue({});
+ getCallApmApiSpy().mockResolvedValue({});
});
afterAll(() => {
jest.resetAllMocks();
diff --git a/x-pack/plugins/apm/public/components/app/service_overview/service_overview.test.tsx b/x-pack/plugins/apm/public/components/app/service_overview/service_overview.test.tsx
index b30faac7a65af..c6ed4e640693f 100644
--- a/x-pack/plugins/apm/public/components/app/service_overview/service_overview.test.tsx
+++ b/x-pack/plugins/apm/public/components/app/service_overview/service_overview.test.tsx
@@ -22,9 +22,12 @@ import * as useTransactionBreakdownHooks from '../../shared/charts/transaction_b
import { renderWithTheme } from '../../../utils/testHelpers';
import { ServiceOverview } from './';
import { waitFor } from '@testing-library/dom';
-import * as callApmApiModule from '../../../services/rest/createCallApmApi';
import * as useApmServiceContextHooks from '../../../context/apm_service/use_apm_service_context';
import { LatencyAggregationType } from '../../../../common/latency_aggregation_types';
+import {
+ getCallApmApiSpy,
+ getCreateCallApmApiSpy,
+} from '../../../services/rest/callApmApiSpy';
const KibanaReactContext = createKibanaReactContext({
usageCollection: { reportUiCounter: () => {} },
@@ -83,10 +86,10 @@ describe('ServiceOverview', () => {
/* eslint-disable @typescript-eslint/naming-convention */
const calls = {
'GET /api/apm/services/{serviceName}/error_groups/primary_statistics': {
- error_groups: [],
+ error_groups: [] as any[],
},
'GET /api/apm/services/{serviceName}/transactions/groups/primary_statistics': {
- transactionGroups: [],
+ transactionGroups: [] as any[],
totalTransactionGroups: 0,
isAggregationAccurate: true,
},
@@ -95,19 +98,17 @@ describe('ServiceOverview', () => {
};
/* eslint-enable @typescript-eslint/naming-convention */
- jest
- .spyOn(callApmApiModule, 'createCallApmApi')
- .mockImplementation(() => {});
-
- const callApmApi = jest
- .spyOn(callApmApiModule, 'callApmApi')
- .mockImplementation(({ endpoint }) => {
+ const callApmApiSpy = getCallApmApiSpy().mockImplementation(
+ ({ endpoint }) => {
const response = calls[endpoint as keyof typeof calls];
return response
? Promise.resolve(response)
: Promise.reject(`Response for ${endpoint} is not defined`);
- });
+ }
+ );
+
+ getCreateCallApmApiSpy().mockImplementation(() => callApmApiSpy as any);
jest
.spyOn(useTransactionBreakdownHooks, 'useTransactionBreakdown')
.mockReturnValue({
@@ -124,7 +125,7 @@ describe('ServiceOverview', () => {
);
await waitFor(() =>
- expect(callApmApi).toHaveBeenCalledTimes(Object.keys(calls).length)
+ expect(callApmApiSpy).toHaveBeenCalledTimes(Object.keys(calls).length)
);
expect((await findAllByText('Latency')).length).toBeGreaterThan(0);
diff --git a/x-pack/plugins/apm/public/services/rest/apm_observability_overview_fetchers.test.ts b/x-pack/plugins/apm/public/services/rest/apm_observability_overview_fetchers.test.ts
index 29fabc51fd582..00447607cf787 100644
--- a/x-pack/plugins/apm/public/services/rest/apm_observability_overview_fetchers.test.ts
+++ b/x-pack/plugins/apm/public/services/rest/apm_observability_overview_fetchers.test.ts
@@ -10,10 +10,10 @@ import {
fetchObservabilityOverviewPageData,
getHasData,
} from './apm_observability_overview_fetchers';
-import * as createCallApmApi from './createCallApmApi';
+import { getCallApmApiSpy } from './callApmApiSpy';
describe('Observability dashboard data', () => {
- const callApmApiMock = jest.spyOn(createCallApmApi, 'callApmApi');
+ const callApmApiMock = getCallApmApiSpy();
const params = {
absoluteTime: {
start: moment('2020-07-02T13:25:11.629Z').valueOf(),
@@ -84,7 +84,7 @@ describe('Observability dashboard data', () => {
callApmApiMock.mockImplementation(() =>
Promise.resolve({
serviceCount: 0,
- transactionPerMinute: { value: null, timeseries: [] },
+ transactionPerMinute: { value: null, timeseries: [] as any },
})
);
const response = await fetchObservabilityOverviewPageData(params);
diff --git a/x-pack/plugins/apm/public/services/rest/callApmApiSpy.ts b/x-pack/plugins/apm/public/services/rest/callApmApiSpy.ts
new file mode 100644
index 0000000000000..ba9f740e06d0d
--- /dev/null
+++ b/x-pack/plugins/apm/public/services/rest/callApmApiSpy.ts
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import * as createCallApmApi from './createCallApmApi';
+import type { AbstractAPMClient } from './createCallApmApi';
+
+export type CallApmApiSpy = jest.SpyInstance<
+  Promise<any>,
+  Parameters<AbstractAPMClient>
+>;
+
+export type CreateCallApmApiSpy = jest.SpyInstance;
+
+export const getCreateCallApmApiSpy = () =>
+ (jest.spyOn(
+ createCallApmApi,
+ 'createCallApmApi'
+ ) as unknown) as CreateCallApmApiSpy;
+export const getCallApmApiSpy = () =>
+ (jest.spyOn(createCallApmApi, 'callApmApi') as unknown) as CallApmApiSpy;
diff --git a/x-pack/plugins/apm/public/services/rest/createCallApmApi.ts b/x-pack/plugins/apm/public/services/rest/createCallApmApi.ts
index b0cce3296fe21..0e82d70faf1e1 100644
--- a/x-pack/plugins/apm/public/services/rest/createCallApmApi.ts
+++ b/x-pack/plugins/apm/public/services/rest/createCallApmApi.ts
@@ -6,30 +6,68 @@
*/
import { CoreSetup, CoreStart } from 'kibana/public';
-import { parseEndpoint } from '../../../common/apm_api/parse_endpoint';
+import * as t from 'io-ts';
+import type {
+ ClientRequestParamsOf,
+ EndpointOf,
+ ReturnOf,
+ RouteRepositoryClient,
+ ServerRouteRepository,
+ ServerRoute,
+} from '@kbn/server-route-repository';
+import { formatRequest } from '@kbn/server-route-repository/target/format_request';
import { FetchOptions } from '../../../common/fetch_options';
import { callApi } from './callApi';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import type { APMAPI } from '../../../server/routes/create_apm_api';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import type { Client } from '../../../server/routes/typings';
-
-export type APMClient = Client;
-export type AutoAbortedAPMClient = Client;
+import type {
+ APMServerRouteRepository,
+ InspectResponse,
+ APMRouteHandlerResources,
+ // eslint-disable-next-line @kbn/eslint/no-restricted-paths
+} from '../../../server';
export type APMClientOptions = Omit<
FetchOptions,
'query' | 'body' | 'pathname' | 'signal'
> & {
- endpoint: string;
signal: AbortSignal | null;
- params?: {
- body?: any;
- query?: Record;
- path?: Record;
- };
};
+export type APMClient = RouteRepositoryClient<
+ APMServerRouteRepository,
+ APMClientOptions
+>;
+
+export type AutoAbortedAPMClient = RouteRepositoryClient<
+ APMServerRouteRepository,
+  Omit<APMClientOptions, 'signal'>
+>;
+
+export type APIReturnType<
+  TEndpoint extends EndpointOf<APMServerRouteRepository>
+> = ReturnOf<APMServerRouteRepository, TEndpoint> & {
+ _inspect?: InspectResponse;
+};
+
+export type APIEndpoint = EndpointOf<APMServerRouteRepository>;
+
+export type APIClientRequestParamsOf<
+  TEndpoint extends EndpointOf<APMServerRouteRepository>
+> = ClientRequestParamsOf<APMServerRouteRepository, TEndpoint>;
+
+export type AbstractAPMRepository = ServerRouteRepository<
+ APMRouteHandlerResources,
+ {},
+ Record<
+ string,
+ ServerRoute
+ >
+>;
+
+export type AbstractAPMClient = RouteRepositoryClient<
+ AbstractAPMRepository,
+ APMClientOptions
+>;
+
export let callApmApi: APMClient = () => {
throw new Error(
'callApmApi has to be initialized before used. Call createCallApmApi first.'
@@ -37,9 +75,13 @@ export let callApmApi: APMClient = () => {
};
export function createCallApmApi(core: CoreStart | CoreSetup) {
- callApmApi = ((options: APMClientOptions) => {
- const { endpoint, params, ...opts } = options;
- const { method, pathname } = parseEndpoint(endpoint, params?.path);
+ callApmApi = ((options) => {
+ const { endpoint, ...opts } = options;
+ const { params } = (options as unknown) as {
+      params?: Partial<Record<string, any>>;
+ };
+
+ const { method, pathname } = formatRequest(endpoint, params?.path);
return callApi(core, {
...opts,
@@ -50,10 +92,3 @@ export function createCallApmApi(core: CoreStart | CoreSetup) {
});
}) as APMClient;
}
-
-// infer return type from API
-export type APIReturnType<
- TPath extends keyof APMAPI['_S']
-> = APMAPI['_S'][TPath] extends { ret: any }
- ? APMAPI['_S'][TPath]['ret']
- : unknown;
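
With this change the APM client types are derived from the server route repository instead of the old `APMAPI['_S']` lookup, so `callApmApi` calls are type-checked end to end: the `endpoint` string, its `params`, and the return type all come from the route definition. A hedged usage sketch — the endpoint name and query parameters here are examples and may not match an actual route exactly:

import { callApmApi } from './createCallApmApi';
import type { APIReturnType } from './createCallApmApi';

// Illustrative: the response type is inferred from the route repository.
type ServicesResponse = APIReturnType<'GET /api/apm/services'>;

async function fetchServices(signal: AbortSignal): Promise<ServicesResponse> {
  return callApmApi({
    endpoint: 'GET /api/apm/services',
    signal,
    params: {
      query: { start: '2021-01-01T00:00:00.000Z', end: '2021-01-02T00:00:00.000Z' },
    },
  });
}
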
diff --git a/x-pack/plugins/apm/scripts/optimize-tsconfig/tsconfig.json b/x-pack/plugins/apm/scripts/optimize-tsconfig/tsconfig.json
index 319eb53313231..40d42298b967b 100644
--- a/x-pack/plugins/apm/scripts/optimize-tsconfig/tsconfig.json
+++ b/x-pack/plugins/apm/scripts/optimize-tsconfig/tsconfig.json
@@ -2,6 +2,7 @@
"include": [
"./x-pack/plugins/apm/**/*",
"./x-pack/plugins/observability/**/*",
+ "./x-pack/plugins/rule_registry/**/*",
"./typings/**/*"
],
"exclude": [
diff --git a/x-pack/plugins/apm/scripts/precommit.js b/x-pack/plugins/apm/scripts/precommit.js
index 695a9ba70f5d7..88d2e169dd542 100644
--- a/x-pack/plugins/apm/scripts/precommit.js
+++ b/x-pack/plugins/apm/scripts/precommit.js
@@ -28,19 +28,8 @@ const testTsconfig = resolve(root, 'x-pack/test/tsconfig.json');
const tasks = new Listr(
[
{
- title: 'Jest',
- task: () =>
- execa(
- 'node',
- [
- resolve(__dirname, './jest.js'),
- '--reporters',
- resolve(__dirname, '../../../../node_modules/jest-silent-reporter'),
- '--collect-coverage',
- 'false',
- ],
- execaOpts
- ),
+ title: 'Lint',
+ task: () => execa('node', [resolve(__dirname, 'eslint.js')], execaOpts),
},
{
title: 'Typescript',
@@ -72,11 +61,22 @@ const tasks = new Listr(
),
},
{
- title: 'Lint',
- task: () => execa('node', [resolve(__dirname, 'eslint.js')], execaOpts),
+ title: 'Jest',
+ task: () =>
+ execa(
+ 'node',
+ [
+ resolve(__dirname, './jest.js'),
+ '--reporters',
+ resolve(__dirname, '../../../../node_modules/jest-silent-reporter'),
+ '--collect-coverage',
+ 'false',
+ ],
+ execaOpts
+ ),
},
],
- { exitOnError: true, concurrent: true }
+ { exitOnError: true, concurrent: false }
);
tasks.run().catch((error) => {
diff --git a/x-pack/plugins/apm/server/index.ts b/x-pack/plugins/apm/server/index.ts
index 00910353ac278..9ab56c1a303ea 100644
--- a/x-pack/plugins/apm/server/index.ts
+++ b/x-pack/plugins/apm/server/index.ts
@@ -120,5 +120,9 @@ export function mergeConfigs(
export const plugin = (initContext: PluginInitializerContext) =>
new APMPlugin(initContext);
-export { APMPlugin, APMPluginSetup } from './plugin';
+export { APMPlugin } from './plugin';
+export { APMPluginSetup } from './types';
+export { APMServerRouteRepository } from './routes/get_global_apm_server_route_repository';
+export { InspectResponse, APMRouteHandlerResources } from './routes/typings';
+
export type { ProcessorEvent } from '../common/processor_event';
diff --git a/x-pack/plugins/apm/server/lib/alerts/action_variables.ts b/x-pack/plugins/apm/server/lib/alerts/action_variables.ts
index 473912c4177a9..b065da7123dec 100644
--- a/x-pack/plugins/apm/server/lib/alerts/action_variables.ts
+++ b/x-pack/plugins/apm/server/lib/alerts/action_variables.ts
@@ -13,28 +13,28 @@ export const apmActionVariables = {
'xpack.apm.alerts.action_variables.serviceName',
{ defaultMessage: 'The service the alert is created for' }
),
- name: 'serviceName',
+ name: 'serviceName' as const,
},
transactionType: {
description: i18n.translate(
'xpack.apm.alerts.action_variables.transactionType',
{ defaultMessage: 'The transaction type the alert is created for' }
),
- name: 'transactionType',
+ name: 'transactionType' as const,
},
environment: {
description: i18n.translate(
'xpack.apm.alerts.action_variables.environment',
       { defaultMessage: 'The environment the alert is created for' }
),
- name: 'environment',
+ name: 'environment' as const,
},
threshold: {
description: i18n.translate('xpack.apm.alerts.action_variables.threshold', {
defaultMessage:
'Any trigger value above this value will cause the alert to fire',
}),
- name: 'threshold',
+ name: 'threshold' as const,
},
triggerValue: {
description: i18n.translate(
@@ -44,7 +44,7 @@ export const apmActionVariables = {
'The value that breached the threshold and triggered the alert',
}
),
- name: 'triggerValue',
+ name: 'triggerValue' as const,
},
interval: {
description: i18n.translate(
@@ -54,6 +54,6 @@ export const apmActionVariables = {
'The length and unit of the time period where the alert conditions were met',
}
),
- name: 'interval',
+ name: 'interval' as const,
},
};
diff --git a/x-pack/plugins/apm/server/lib/alerts/alerting_es_client.ts b/x-pack/plugins/apm/server/lib/alerts/alerting_es_client.ts
index 9a0ba514bb479..e3d5e5481caa5 100644
--- a/x-pack/plugins/apm/server/lib/alerts/alerting_es_client.ts
+++ b/x-pack/plugins/apm/server/lib/alerts/alerting_es_client.ts
@@ -5,28 +5,24 @@
* 2.0.
*/
-import { ApiResponse } from '@elastic/elasticsearch';
-import { ThresholdMetActionGroupId } from '../../../common/alert_types';
import {
ESSearchRequest,
ESSearchResponse,
} from '../../../../../../typings/elasticsearch';
-import {
- AlertInstanceContext,
- AlertInstanceState,
- AlertServices,
-} from '../../../../alerting/server';
+import { AlertServices } from '../../../../alerting/server';
-export function alertingEsClient(
- services: AlertServices<
- AlertInstanceState,
- AlertInstanceContext,
- ThresholdMetActionGroupId
- >,
+export async function alertingEsClient<TParams extends ESSearchRequest>(
+ scopedClusterClient: AlertServices<
+ never,
+ never,
+ never
+ >['scopedClusterClient'],
params: TParams
-): Promise>> {
- return (services.scopedClusterClient.asCurrentUser.search({
+): Promise<ESSearchResponse<unknown, TParams>> {
+ const response = await scopedClusterClient.asCurrentUser.search({
...params,
ignore_unavailable: true,
- }) as unknown) as Promise>>;
+ });
+
+  return (response.body as unknown) as ESSearchResponse<unknown, TParams>;
}
diff --git a/x-pack/plugins/apm/server/lib/alerts/create_apm_lifecycle_rule_type.ts b/x-pack/plugins/apm/server/lib/alerts/create_apm_lifecycle_rule_type.ts
new file mode 100644
index 0000000000000..8d250a5765cce
--- /dev/null
+++ b/x-pack/plugins/apm/server/lib/alerts/create_apm_lifecycle_rule_type.ts
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { createLifecycleRuleTypeFactory } from '../../../../rule_registry/server';
+import { APMRuleRegistry } from '../../plugin';
+
+export const createAPMLifecycleRuleType = createLifecycleRuleTypeFactory<APMRuleRegistry>();
diff --git a/x-pack/plugins/apm/server/lib/alerts/register_apm_alerts.ts b/x-pack/plugins/apm/server/lib/alerts/register_apm_alerts.ts
index a9824c130faa5..9a362efa90ac0 100644
--- a/x-pack/plugins/apm/server/lib/alerts/register_apm_alerts.ts
+++ b/x-pack/plugins/apm/server/lib/alerts/register_apm_alerts.ts
@@ -6,38 +6,25 @@
*/
import { Observable } from 'rxjs';
-import { AlertingPlugin } from '../../../../alerting/server';
-import { ActionsPlugin } from '../../../../actions/server';
+import { Logger } from 'kibana/server';
import { registerTransactionDurationAlertType } from './register_transaction_duration_alert_type';
import { registerTransactionDurationAnomalyAlertType } from './register_transaction_duration_anomaly_alert_type';
import { registerErrorCountAlertType } from './register_error_count_alert_type';
import { APMConfig } from '../..';
import { MlPluginSetup } from '../../../../ml/server';
import { registerTransactionErrorRateAlertType } from './register_transaction_error_rate_alert_type';
+import { APMRuleRegistry } from '../../plugin';
-interface Params {
- alerting: AlertingPlugin['setup'];
- actions: ActionsPlugin['setup'];
+export interface RegisterRuleDependencies {
+ registry: APMRuleRegistry;
ml?: MlPluginSetup;
   config$: Observable<APMConfig>;
+ logger: Logger;
}
-export function registerApmAlerts(params: Params) {
- registerTransactionDurationAlertType({
- alerting: params.alerting,
- config$: params.config$,
- });
- registerTransactionDurationAnomalyAlertType({
- alerting: params.alerting,
- ml: params.ml,
- config$: params.config$,
- });
- registerErrorCountAlertType({
- alerting: params.alerting,
- config$: params.config$,
- });
- registerTransactionErrorRateAlertType({
- alerting: params.alerting,
- config$: params.config$,
- });
+export function registerApmAlerts(dependencies: RegisterRuleDependencies) {
+ registerTransactionDurationAlertType(dependencies);
+ registerTransactionDurationAnomalyAlertType(dependencies);
+ registerErrorCountAlertType(dependencies);
+ registerTransactionErrorRateAlertType(dependencies);
}
diff --git a/x-pack/plugins/apm/server/lib/alerts/register_error_count_alert_type.test.ts b/x-pack/plugins/apm/server/lib/alerts/register_error_count_alert_type.test.ts
index d7dd7aee3ca25..5758dea1860b2 100644
--- a/x-pack/plugins/apm/server/lib/alerts/register_error_count_alert_type.test.ts
+++ b/x-pack/plugins/apm/server/lib/alerts/register_error_count_alert_type.test.ts
@@ -5,50 +5,17 @@
* 2.0.
*/
-import { Observable } from 'rxjs';
-import * as Rx from 'rxjs';
-import { toArray, map } from 'rxjs/operators';
-
-import { AlertingPlugin } from '../../../../alerting/server';
-import { APMConfig } from '../..';
-
import { registerErrorCountAlertType } from './register_error_count_alert_type';
-import { elasticsearchServiceMock } from 'src/core/server/mocks';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { elasticsearchClientMock } from 'src/core/server/elasticsearch/client/mocks';
-
-type Operator = (source: Rx.Observable) => Rx.Observable;
-const pipeClosure = (fn: Operator): Operator => {
- return (source: Rx.Observable) => {
- return Rx.defer(() => fn(source));
- };
-};
-const mockedConfig$ = (Rx.of('apm_oss.errorIndices').pipe(
- pipeClosure((source$) => {
- return source$.pipe(map((i) => i));
- }),
- toArray()
-) as unknown) as Observable;
+import { createRuleTypeMocks } from './test_utils';
describe('Error count alert', () => {
it("doesn't send an alert when error count is less than threshold", async () => {
- let alertExecutor: any;
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
+ const { services, dependencies, executor } = createRuleTypeMocks();
- registerErrorCountAlertType({
- alerting,
- config$: mockedConfig$,
- });
- expect(alertExecutor).toBeDefined();
+ registerErrorCountAlertType(dependencies);
- const services = {
- scopedClusterClient: elasticsearchServiceMock.createScopedClusterClient(),
- alertInstanceFactory: jest.fn(),
- };
const params = { threshold: 1 };
services.scopedClusterClient.asCurrentUser.search.mockReturnValue(
@@ -71,30 +38,21 @@ describe('Error count alert', () => {
})
);
- await alertExecutor!({ services, params });
+ await executor({ params });
expect(services.alertInstanceFactory).not.toBeCalled();
});
- it('sends alerts with service name and environment', async () => {
- let alertExecutor: any;
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
+ it('sends alerts with service name and environment for those that exceeded the threshold', async () => {
+ const {
+ services,
+ dependencies,
+ executor,
+ scheduleActions,
+ } = createRuleTypeMocks();
- registerErrorCountAlertType({
- alerting,
- config$: mockedConfig$,
- });
- expect(alertExecutor).toBeDefined();
+ registerErrorCountAlertType(dependencies);
- const scheduleActions = jest.fn();
- const services = {
- scopedClusterClient: elasticsearchServiceMock.createScopedClusterClient(),
- alertInstanceFactory: jest.fn(() => ({ scheduleActions })),
- };
- const params = { threshold: 1, windowSize: 5, windowUnit: 'm' };
+ const params = { threshold: 2, windowSize: 5, windowUnit: 'm' };
services.scopedClusterClient.asCurrentUser.search.mockReturnValue(
elasticsearchClientMock.createSuccessTransportRequestPromise({
@@ -106,18 +64,62 @@ describe('Error count alert', () => {
},
},
aggregations: {
- services: {
+ error_counts: {
buckets: [
{
- key: 'foo',
- environments: {
- buckets: [{ key: 'env-foo' }, { key: 'env-foo-2' }],
+ key: ['foo', 'env-foo'],
+ doc_count: 5,
+ latest: {
+ top: [
+ {
+ metrics: {
+ 'service.name': 'foo',
+ 'service.environment': 'env-foo',
+ },
+ },
+ ],
+ },
+ },
+ {
+ key: ['foo', 'env-foo-2'],
+ doc_count: 4,
+ latest: {
+ top: [
+ {
+ metrics: {
+ 'service.name': 'foo',
+ 'service.environment': 'env-foo-2',
+ },
+ },
+ ],
},
},
{
- key: 'bar',
- environments: {
- buckets: [{ key: 'env-bar' }, { key: 'env-bar-2' }],
+ key: ['bar', 'env-bar'],
+ doc_count: 3,
+ latest: {
+ top: [
+ {
+ metrics: {
+ 'service.name': 'bar',
+ 'service.environment': 'env-bar',
+ },
+ },
+ ],
+ },
+ },
+ {
+ key: ['bar', 'env-bar-2'],
+ doc_count: 1,
+ latest: {
+ top: [
+ {
+ metrics: {
+ 'service.name': 'bar',
+ 'service.environment': 'env-bar-2',
+ },
+ },
+ ],
},
},
],
@@ -134,115 +136,36 @@ describe('Error count alert', () => {
})
);
- await alertExecutor!({ services, params });
+ await executor({ params });
[
'apm.error_rate_foo_env-foo',
'apm.error_rate_foo_env-foo-2',
'apm.error_rate_bar_env-bar',
- 'apm.error_rate_bar_env-bar-2',
].forEach((instanceName) =>
expect(services.alertInstanceFactory).toHaveBeenCalledWith(instanceName)
);
+ expect(scheduleActions).toHaveBeenCalledTimes(3);
+
expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
serviceName: 'foo',
environment: 'env-foo',
- threshold: 1,
- triggerValue: 2,
+ threshold: 2,
+ triggerValue: 5,
interval: '5m',
});
expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
serviceName: 'foo',
environment: 'env-foo-2',
- threshold: 1,
- triggerValue: 2,
+ threshold: 2,
+ triggerValue: 4,
interval: '5m',
});
expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
serviceName: 'bar',
environment: 'env-bar',
- threshold: 1,
- triggerValue: 2,
- interval: '5m',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'bar',
- environment: 'env-bar-2',
- threshold: 1,
- triggerValue: 2,
- interval: '5m',
- });
- });
- it('sends alerts with service name', async () => {
- let alertExecutor: any;
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
-
- registerErrorCountAlertType({
- alerting,
- config$: mockedConfig$,
- });
- expect(alertExecutor).toBeDefined();
-
- const scheduleActions = jest.fn();
- const services = {
- scopedClusterClient: elasticsearchServiceMock.createScopedClusterClient(),
- alertInstanceFactory: jest.fn(() => ({ scheduleActions })),
- };
- const params = { threshold: 1, windowSize: 5, windowUnit: 'm' };
-
- services.scopedClusterClient.asCurrentUser.search.mockReturnValue(
- elasticsearchClientMock.createSuccessTransportRequestPromise({
- hits: {
- hits: [],
- total: {
- relation: 'eq',
- value: 2,
- },
- },
- aggregations: {
- services: {
- buckets: [
- {
- key: 'foo',
- },
- {
- key: 'bar',
- },
- ],
- },
- },
- took: 0,
- timed_out: false,
- _shards: {
- failed: 0,
- skipped: 0,
- successful: 1,
- total: 1,
- },
- })
- );
-
- await alertExecutor!({ services, params });
- ['apm.error_rate_foo', 'apm.error_rate_bar'].forEach((instanceName) =>
- expect(services.alertInstanceFactory).toHaveBeenCalledWith(instanceName)
- );
-
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'foo',
- environment: undefined,
- threshold: 1,
- triggerValue: 2,
- interval: '5m',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'bar',
- environment: undefined,
- threshold: 1,
- triggerValue: 2,
+ threshold: 2,
+ triggerValue: 3,
interval: '5m',
});
});
diff --git a/x-pack/plugins/apm/server/lib/alerts/register_error_count_alert_type.ts b/x-pack/plugins/apm/server/lib/alerts/register_error_count_alert_type.ts
index 0120891a8f868..8240e0c369d1f 100644
--- a/x-pack/plugins/apm/server/lib/alerts/register_error_count_alert_type.ts
+++ b/x-pack/plugins/apm/server/lib/alerts/register_error_count_alert_type.ts
@@ -5,22 +5,11 @@
* 2.0.
*/
-import { schema, TypeOf } from '@kbn/config-schema';
-import { isEmpty } from 'lodash';
-import { Observable } from 'rxjs';
+import { schema } from '@kbn/config-schema';
import { take } from 'rxjs/operators';
-import { APMConfig } from '../..';
-import {
- AlertingPlugin,
- AlertInstanceContext,
- AlertInstanceState,
- AlertTypeState,
-} from '../../../../alerting/server';
-import {
- AlertType,
- ALERT_TYPES_CONFIG,
- ThresholdMetActionGroupId,
-} from '../../../common/alert_types';
+import { ENVIRONMENT_NOT_DEFINED } from '../../../common/environment_filter_values';
+import { asMutableArray } from '../../../common/utils/as_mutable_array';
+import { AlertType, ALERT_TYPES_CONFIG } from '../../../common/alert_types';
import {
PROCESSOR_EVENT,
SERVICE_ENVIRONMENT,
@@ -31,11 +20,8 @@ import { environmentQuery } from '../../../server/utils/queries';
import { getApmIndices } from '../settings/apm_indices/get_apm_indices';
import { apmActionVariables } from './action_variables';
import { alertingEsClient } from './alerting_es_client';
-
-interface RegisterAlertParams {
- alerting: AlertingPlugin['setup'];
- config$: Observable;
-}
+import { RegisterRuleDependencies } from './register_apm_alerts';
+import { createAPMLifecycleRuleType } from './create_apm_lifecycle_rule_type';
const paramsSchema = schema.object({
windowSize: schema.number(),
@@ -48,127 +34,131 @@ const paramsSchema = schema.object({
const alertTypeConfig = ALERT_TYPES_CONFIG[AlertType.ErrorCount];
export function registerErrorCountAlertType({
- alerting,
+ registry,
config$,
-}: RegisterAlertParams) {
- alerting.registerType<
- TypeOf,
- AlertTypeState,
- AlertInstanceState,
- AlertInstanceContext,
- ThresholdMetActionGroupId
- >({
- id: AlertType.ErrorCount,
- name: alertTypeConfig.name,
- actionGroups: alertTypeConfig.actionGroups,
- defaultActionGroupId: alertTypeConfig.defaultActionGroupId,
- validate: {
- params: paramsSchema,
- },
- actionVariables: {
- context: [
- apmActionVariables.serviceName,
- apmActionVariables.environment,
- apmActionVariables.threshold,
- apmActionVariables.triggerValue,
- apmActionVariables.interval,
- ],
- },
- producer: 'apm',
- minimumLicenseRequired: 'basic',
- executor: async ({ services, params }) => {
- const config = await config$.pipe(take(1)).toPromise();
- const alertParams = params;
- const indices = await getApmIndices({
- config,
- savedObjectsClient: services.savedObjectsClient,
- });
- const maxServiceEnvironments = config['xpack.apm.maxServiceEnvironments'];
+}: RegisterRuleDependencies) {
+ registry.registerType(
+ createAPMLifecycleRuleType({
+ id: AlertType.ErrorCount,
+ name: alertTypeConfig.name,
+ actionGroups: alertTypeConfig.actionGroups,
+ defaultActionGroupId: alertTypeConfig.defaultActionGroupId,
+ validate: {
+ params: paramsSchema,
+ },
+ actionVariables: {
+ context: [
+ apmActionVariables.serviceName,
+ apmActionVariables.environment,
+ apmActionVariables.threshold,
+ apmActionVariables.triggerValue,
+ apmActionVariables.interval,
+ ],
+ },
+ producer: 'apm',
+ minimumLicenseRequired: 'basic',
+ executor: async ({ services, params }) => {
+ const config = await config$.pipe(take(1)).toPromise();
+ const alertParams = params;
+ const indices = await getApmIndices({
+ config,
+ savedObjectsClient: services.savedObjectsClient,
+ });
- const searchParams = {
- index: indices['apm_oss.errorIndices'],
- size: 0,
- body: {
- track_total_hits: true,
- query: {
- bool: {
- filter: [
- {
- range: {
- '@timestamp': {
- gte: `now-${alertParams.windowSize}${alertParams.windowUnit}`,
+ const searchParams = {
+ index: indices['apm_oss.errorIndices'],
+ size: 0,
+ body: {
+ query: {
+ bool: {
+ filter: [
+ {
+ range: {
+ '@timestamp': {
+ gte: `now-${alertParams.windowSize}${alertParams.windowUnit}`,
+ },
},
},
- },
- { term: { [PROCESSOR_EVENT]: ProcessorEvent.error } },
- ...(alertParams.serviceName
- ? [{ term: { [SERVICE_NAME]: alertParams.serviceName } }]
- : []),
- ...environmentQuery(alertParams.environment),
- ],
- },
- },
- aggs: {
- services: {
- terms: {
- field: SERVICE_NAME,
- size: 50,
+ { term: { [PROCESSOR_EVENT]: ProcessorEvent.error } },
+ ...(alertParams.serviceName
+ ? [{ term: { [SERVICE_NAME]: alertParams.serviceName } }]
+ : []),
+ ...environmentQuery(alertParams.environment),
+ ],
},
- aggs: {
- environments: {
- terms: {
- field: SERVICE_ENVIRONMENT,
- size: maxServiceEnvironments,
+ },
+ aggs: {
+ error_counts: {
+ multi_terms: {
+ terms: [
+ { field: SERVICE_NAME },
+ { field: SERVICE_ENVIRONMENT, missing: '' },
+ ],
+ size: 10000,
+ },
+ aggs: {
+ latest: {
+ top_metrics: {
+ metrics: asMutableArray([
+ { field: SERVICE_NAME },
+ { field: SERVICE_ENVIRONMENT },
+ ] as const),
+ sort: {
+ '@timestamp': 'desc' as const,
+ },
+ },
},
},
},
},
},
- },
- };
+ };
+
+ const response = await alertingEsClient(
+ services.scopedClusterClient,
+ searchParams
+ );
- const { body: response } = await alertingEsClient(services, searchParams);
- const errorCount = response.hits.total.value;
+ const errorCountResults =
+ response.aggregations?.error_counts.buckets.map((bucket) => {
+ const latest = bucket.latest.top[0].metrics;
- if (errorCount > alertParams.threshold) {
- function scheduleAction({
- serviceName,
- environment,
- }: {
- serviceName: string;
- environment?: string;
- }) {
- const alertInstanceName = [
- AlertType.ErrorCount,
- serviceName,
- environment,
- ]
- .filter((name) => name)
- .join('_');
+ return {
+ serviceName: latest['service.name'] as string,
+ environment: latest['service.environment'] as string | undefined,
+ errorCount: bucket.doc_count,
+ };
+ }) ?? [];
- const alertInstance = services.alertInstanceFactory(
- alertInstanceName
- );
- alertInstance.scheduleActions(alertTypeConfig.defaultActionGroupId, {
- serviceName,
- environment,
- threshold: alertParams.threshold,
- triggerValue: errorCount,
- interval: `${alertParams.windowSize}${alertParams.windowUnit}`,
+ errorCountResults
+ .filter((result) => result.errorCount >= alertParams.threshold)
+ .forEach((result) => {
+ const { serviceName, environment, errorCount } = result;
+
+ services
+ .alertWithLifecycle({
+ id: [AlertType.ErrorCount, serviceName, environment]
+ .filter((name) => name)
+ .join('_'),
+ fields: {
+ [SERVICE_NAME]: serviceName,
+ ...(environment
+ ? { [SERVICE_ENVIRONMENT]: environment }
+ : {}),
+ [PROCESSOR_EVENT]: 'error',
+ },
+ })
+ .scheduleActions(alertTypeConfig.defaultActionGroupId, {
+ serviceName,
+ environment: environment || ENVIRONMENT_NOT_DEFINED.text,
+ threshold: alertParams.threshold,
+ triggerValue: errorCount,
+ interval: `${alertParams.windowSize}${alertParams.windowUnit}`,
+ });
});
- }
- response.aggregations?.services.buckets.forEach((serviceBucket) => {
- const serviceName = serviceBucket.key as string;
- if (isEmpty(serviceBucket.environments?.buckets)) {
- scheduleAction({ serviceName });
- } else {
- serviceBucket.environments.buckets.forEach((envBucket) => {
- const environment = envBucket.key as string;
- scheduleAction({ serviceName, environment });
- });
- }
- });
- }
- },
- });
+
+ return {};
+ },
+ })
+ );
}
diff --git a/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_alert_type.ts b/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_alert_type.ts
index 500e0744d5638..6ca1c4370d6ae 100644
--- a/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_alert_type.ts
+++ b/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_alert_type.ts
@@ -6,10 +6,9 @@
*/
import { schema } from '@kbn/config-schema';
-import { Observable } from 'rxjs';
import { take } from 'rxjs/operators';
-import { APMConfig } from '../..';
-import { AlertingPlugin } from '../../../../alerting/server';
+import { QueryContainer } from '@elastic/elasticsearch/api/types';
+import { parseEnvironmentUrlParam } from '../../../common/environment_filter_values';
import { AlertType, ALERT_TYPES_CONFIG } from '../../../common/alert_types';
import {
PROCESSOR_EVENT,
@@ -24,11 +23,8 @@ import { environmentQuery } from '../../../server/utils/queries';
import { getApmIndices } from '../settings/apm_indices/get_apm_indices';
import { apmActionVariables } from './action_variables';
import { alertingEsClient } from './alerting_es_client';
-
-interface RegisterAlertParams {
- alerting: AlertingPlugin['setup'];
- config$: Observable;
-}
+import { RegisterRuleDependencies } from './register_apm_alerts';
+import { createAPMLifecycleRuleType } from './create_apm_lifecycle_rule_type';
const paramsSchema = schema.object({
serviceName: schema.string(),
@@ -47,116 +43,126 @@ const paramsSchema = schema.object({
const alertTypeConfig = ALERT_TYPES_CONFIG[AlertType.TransactionDuration];
export function registerTransactionDurationAlertType({
- alerting,
+ registry,
config$,
-}: RegisterAlertParams) {
- alerting.registerType({
- id: AlertType.TransactionDuration,
- name: alertTypeConfig.name,
- actionGroups: alertTypeConfig.actionGroups,
- defaultActionGroupId: alertTypeConfig.defaultActionGroupId,
- validate: {
- params: paramsSchema,
- },
- actionVariables: {
- context: [
- apmActionVariables.serviceName,
- apmActionVariables.transactionType,
- apmActionVariables.environment,
- apmActionVariables.threshold,
- apmActionVariables.triggerValue,
- apmActionVariables.interval,
- ],
- },
- producer: 'apm',
- minimumLicenseRequired: 'basic',
- executor: async ({ services, params }) => {
- const config = await config$.pipe(take(1)).toPromise();
- const alertParams = params;
- const indices = await getApmIndices({
- config,
- savedObjectsClient: services.savedObjectsClient,
- });
- const maxServiceEnvironments = config['xpack.apm.maxServiceEnvironments'];
+}: RegisterRuleDependencies) {
+ registry.registerType(
+ createAPMLifecycleRuleType({
+ id: AlertType.TransactionDuration,
+ name: alertTypeConfig.name,
+ actionGroups: alertTypeConfig.actionGroups,
+ defaultActionGroupId: alertTypeConfig.defaultActionGroupId,
+ validate: {
+ params: paramsSchema,
+ },
+ actionVariables: {
+ context: [
+ apmActionVariables.serviceName,
+ apmActionVariables.transactionType,
+ apmActionVariables.environment,
+ apmActionVariables.threshold,
+ apmActionVariables.triggerValue,
+ apmActionVariables.interval,
+ ],
+ },
+ producer: 'apm',
+ minimumLicenseRequired: 'basic',
+ executor: async ({ services, params }) => {
+ const config = await config$.pipe(take(1)).toPromise();
+ const alertParams = params;
+ const indices = await getApmIndices({
+ config,
+ savedObjectsClient: services.savedObjectsClient,
+ });
- const searchParams = {
- index: indices['apm_oss.transactionIndices'],
- size: 0,
- body: {
- query: {
- bool: {
- filter: [
- {
- range: {
- '@timestamp': {
- gte: `now-${alertParams.windowSize}${alertParams.windowUnit}`,
+ const searchParams = {
+ index: indices['apm_oss.transactionIndices'],
+ size: 0,
+ body: {
+ query: {
+ bool: {
+ filter: [
+ {
+ range: {
+ '@timestamp': {
+ gte: `now-${alertParams.windowSize}${alertParams.windowUnit}`,
+ },
},
},
- },
- { term: { [PROCESSOR_EVENT]: ProcessorEvent.transaction } },
- { term: { [SERVICE_NAME]: alertParams.serviceName } },
- { term: { [TRANSACTION_TYPE]: alertParams.transactionType } },
- ...environmentQuery(alertParams.environment),
- ],
+ { term: { [PROCESSOR_EVENT]: ProcessorEvent.transaction } },
+ { term: { [SERVICE_NAME]: alertParams.serviceName } },
+ { term: { [TRANSACTION_TYPE]: alertParams.transactionType } },
+ ...environmentQuery(alertParams.environment),
+ ] as QueryContainer[],
+ },
},
- },
- aggs: {
- agg:
- alertParams.aggregationType === 'avg'
- ? { avg: { field: TRANSACTION_DURATION } }
- : {
- percentiles: {
- field: TRANSACTION_DURATION,
- percents: [
- alertParams.aggregationType === '95th' ? 95 : 99,
- ],
+ aggs: {
+ latency:
+ alertParams.aggregationType === 'avg'
+ ? { avg: { field: TRANSACTION_DURATION } }
+ : {
+ percentiles: {
+ field: TRANSACTION_DURATION,
+ percents: [
+ alertParams.aggregationType === '95th' ? 95 : 99,
+ ],
+ },
},
- },
- environments: {
- terms: {
- field: SERVICE_ENVIRONMENT,
- size: maxServiceEnvironments,
- },
},
},
- },
- };
+ };
- const { body: response } = await alertingEsClient(services, searchParams);
+ const response = await alertingEsClient(
+ services.scopedClusterClient,
+ searchParams
+ );
- if (!response.aggregations) {
- return;
- }
+ if (!response.aggregations) {
+ return {};
+ }
- const { agg, environments } = response.aggregations;
+ const { latency } = response.aggregations;
- const transactionDuration =
- 'values' in agg ? Object.values(agg.values)[0] : agg?.value;
+ const transactionDuration =
+ 'values' in latency
+ ? Object.values(latency.values)[0]
+ : latency?.value;
- const threshold = alertParams.threshold * 1000;
+ const threshold = alertParams.threshold * 1000;
- if (transactionDuration && transactionDuration > threshold) {
- const durationFormatter = getDurationFormatter(transactionDuration);
- const transactionDurationFormatted = durationFormatter(
- transactionDuration
- ).formatted;
+ if (transactionDuration && transactionDuration > threshold) {
+ const durationFormatter = getDurationFormatter(transactionDuration);
+ const transactionDurationFormatted = durationFormatter(
+ transactionDuration
+ ).formatted;
- environments.buckets.map((bucket) => {
- const environment = bucket.key;
- const alertInstance = services.alertInstanceFactory(
- `${AlertType.TransactionDuration}_${environment}`
+ const environmentParsed = parseEnvironmentUrlParam(
+ alertParams.environment
);
- alertInstance.scheduleActions(alertTypeConfig.defaultActionGroupId, {
- transactionType: alertParams.transactionType,
- serviceName: alertParams.serviceName,
- environment,
- threshold,
- triggerValue: transactionDurationFormatted,
- interval: `${alertParams.windowSize}${alertParams.windowUnit}`,
- });
- });
- }
- },
- });
+ services
+ .alertWithLifecycle({
+ id: `${AlertType.TransactionDuration}_${environmentParsed.text}`,
+ fields: {
+ [SERVICE_NAME]: alertParams.serviceName,
+ ...(environmentParsed.esFieldValue
+ ? { [SERVICE_ENVIRONMENT]: environmentParsed.esFieldValue }
+ : {}),
+ [TRANSACTION_TYPE]: alertParams.transactionType,
+ },
+ })
+ .scheduleActions(alertTypeConfig.defaultActionGroupId, {
+ transactionType: alertParams.transactionType,
+ serviceName: alertParams.serviceName,
+ environment: environmentParsed.text,
+ threshold,
+ triggerValue: transactionDurationFormatted,
+ interval: `${alertParams.windowSize}${alertParams.windowUnit}`,
+ });
+ }
+
+ return {};
+ },
+ })
+ );
}
diff --git a/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.test.ts b/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.test.ts
index 5f6c07cae4b8f..b9346b2bf4649 100644
--- a/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.test.ts
+++ b/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.test.ts
@@ -4,29 +4,11 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
-
-import { Observable } from 'rxjs';
-import * as Rx from 'rxjs';
-import { toArray, map } from 'rxjs/operators';
-import { AlertingPlugin } from '../../../../alerting/server';
import { registerTransactionDurationAnomalyAlertType } from './register_transaction_duration_anomaly_alert_type';
-import { APMConfig } from '../..';
import { ANOMALY_SEVERITY } from '../../../../ml/common';
import { Job, MlPluginSetup } from '../../../../ml/server';
import * as GetServiceAnomalies from '../service_map/get_service_anomalies';
-
-type Operator = (source: Rx.Observable) => Rx.Observable;
-const pipeClosure = (fn: Operator): Operator => {
- return (source: Rx.Observable) => {
- return Rx.defer(() => fn(source));
- };
-};
-const mockedConfig$ = (Rx.of('apm_oss.errorIndices').pipe(
- pipeClosure((source$) => {
- return source$.pipe(map((i) => i));
- }),
- toArray()
-) as unknown) as Observable;
+import { createRuleTypeMocks } from './test_utils';
describe('Transaction duration anomaly alert', () => {
afterEach(() => {
@@ -34,28 +16,21 @@ describe('Transaction duration anomaly alert', () => {
});
describe("doesn't send alert", () => {
it('ml is not defined', async () => {
- let alertExecutor: any;
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
+ const { services, dependencies, executor } = createRuleTypeMocks();
registerTransactionDurationAnomalyAlertType({
- alerting,
+ ...dependencies,
ml: undefined,
- config$: mockedConfig$,
});
- expect(alertExecutor).toBeDefined();
- const services = {
- callCluster: jest.fn(),
- alertInstanceFactory: jest.fn(),
- };
const params = { anomalySeverityType: ANOMALY_SEVERITY.MINOR };
- await alertExecutor!({ services, params });
- expect(services.callCluster).not.toHaveBeenCalled();
+ await executor({ params });
+
+ expect(
+ services.scopedClusterClient.asCurrentUser.search
+ ).not.toHaveBeenCalled();
+
expect(services.alertInstanceFactory).not.toHaveBeenCalled();
});
@@ -64,13 +39,7 @@ describe('Transaction duration anomaly alert', () => {
.spyOn(GetServiceAnomalies, 'getMLJobs')
.mockReturnValue(Promise.resolve([]));
- let alertExecutor: any;
-
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
+ const { services, dependencies, executor } = createRuleTypeMocks();
const ml = ({
mlSystemProvider: () => ({ mlAnomalySearch: jest.fn() }),
@@ -78,117 +47,47 @@ describe('Transaction duration anomaly alert', () => {
} as unknown) as MlPluginSetup;
registerTransactionDurationAnomalyAlertType({
- alerting,
+ ...dependencies,
ml,
- config$: mockedConfig$,
});
- expect(alertExecutor).toBeDefined();
- const services = {
- callCluster: jest.fn(),
- alertInstanceFactory: jest.fn(),
- };
const params = { anomalySeverityType: ANOMALY_SEVERITY.MINOR };
- await alertExecutor!({ services, params });
- expect(services.callCluster).not.toHaveBeenCalled();
- expect(services.alertInstanceFactory).not.toHaveBeenCalled();
- });
+ await executor({ params });
+ expect(
+ services.scopedClusterClient.asCurrentUser.search
+ ).not.toHaveBeenCalled();
- it('anomaly is less than threshold', async () => {
- jest
- .spyOn(GetServiceAnomalies, 'getMLJobs')
- .mockReturnValue(
- Promise.resolve([{ job_id: '1' }, { job_id: '2' }] as Job[])
- );
-
- let alertExecutor: any;
-
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
-
- const ml = ({
- mlSystemProvider: () => ({
- mlAnomalySearch: () => ({
- hits: { total: { value: 0 } },
- }),
- }),
- anomalyDetectorsProvider: jest.fn(),
- } as unknown) as MlPluginSetup;
-
- registerTransactionDurationAnomalyAlertType({
- alerting,
- ml,
- config$: mockedConfig$,
- });
- expect(alertExecutor).toBeDefined();
-
- const services = {
- callCluster: jest.fn(),
- alertInstanceFactory: jest.fn(),
- };
- const params = { anomalySeverityType: ANOMALY_SEVERITY.MINOR };
-
- await alertExecutor!({ services, params });
- expect(services.callCluster).not.toHaveBeenCalled();
expect(services.alertInstanceFactory).not.toHaveBeenCalled();
});
- });
- describe('sends alert', () => {
- it('with service name, environment and transaction type', async () => {
+ it('anomaly is less than threshold', async () => {
jest.spyOn(GetServiceAnomalies, 'getMLJobs').mockReturnValue(
- Promise.resolve([
+ Promise.resolve(([
{
job_id: '1',
- custom_settings: {
- job_tags: {
- environment: 'production',
- },
- },
- } as unknown,
+ custom_settings: { job_tags: { environment: 'development' } },
+ },
{
job_id: '2',
- custom_settings: {
- job_tags: {
- environment: 'production',
- },
- },
- } as unknown,
- ] as Job[])
+ custom_settings: { job_tags: { environment: 'production' } },
+ },
+ ] as unknown) as Job[])
);
- let alertExecutor: any;
-
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
+ const { services, dependencies, executor } = createRuleTypeMocks();
const ml = ({
mlSystemProvider: () => ({
mlAnomalySearch: () => ({
- hits: { total: { value: 2 } },
aggregations: {
- services: {
+ anomaly_groups: {
buckets: [
{
- key: 'foo',
- transaction_types: {
- buckets: [{ key: 'type-foo' }],
- },
- record_avg: { value: 80 },
- },
- {
- key: 'bar',
- transaction_types: {
- buckets: [{ key: 'type-bar' }],
+ doc_count: 1,
+ latest_score: {
+ top: [{ metrics: { record_score: 0, job_id: '1' } }],
},
- record_avg: { value: 20 },
},
],
},
@@ -199,84 +98,77 @@ describe('Transaction duration anomaly alert', () => {
} as unknown) as MlPluginSetup;
registerTransactionDurationAnomalyAlertType({
- alerting,
+ ...dependencies,
ml,
- config$: mockedConfig$,
});
- expect(alertExecutor).toBeDefined();
- const scheduleActions = jest.fn();
- const services = {
- callCluster: jest.fn(),
- alertInstanceFactory: jest.fn(() => ({ scheduleActions })),
- };
const params = { anomalySeverityType: ANOMALY_SEVERITY.MINOR };
- await alertExecutor!({ services, params });
-
- await alertExecutor!({ services, params });
- [
- 'apm.transaction_duration_anomaly_foo_production_type-foo',
- 'apm.transaction_duration_anomaly_bar_production_type-bar',
- ].forEach((instanceName) =>
- expect(services.alertInstanceFactory).toHaveBeenCalledWith(instanceName)
- );
+ await executor({ params });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'foo',
- transactionType: 'type-foo',
- environment: 'production',
- threshold: 'minor',
- thresholdValue: 'critical',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'bar',
- transactionType: 'type-bar',
- environment: 'production',
- threshold: 'minor',
- thresholdValue: 'warning',
- });
+ expect(
+ services.scopedClusterClient.asCurrentUser.search
+ ).not.toHaveBeenCalled();
+ expect(services.alertInstanceFactory).not.toHaveBeenCalled();
});
+ });
- it('with service name', async () => {
+ describe('sends alert', () => {
+ it('for all services that exceeded the threshold', async () => {
jest.spyOn(GetServiceAnomalies, 'getMLJobs').mockReturnValue(
- Promise.resolve([
+ Promise.resolve(([
{
job_id: '1',
- custom_settings: {
- job_tags: {
- environment: 'production',
- },
- },
- } as unknown,
+ custom_settings: { job_tags: { environment: 'development' } },
+ },
{
job_id: '2',
- custom_settings: {
- job_tags: {
- environment: 'testing',
- },
- },
- } as unknown,
- ] as Job[])
+ custom_settings: { job_tags: { environment: 'production' } },
+ },
+ ] as unknown) as Job[])
);
- let alertExecutor: any;
-
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
+ const {
+ services,
+ dependencies,
+ executor,
+ scheduleActions,
+ } = createRuleTypeMocks();
const ml = ({
mlSystemProvider: () => ({
mlAnomalySearch: () => ({
- hits: { total: { value: 2 } },
aggregations: {
- services: {
+ anomaly_groups: {
buckets: [
- { key: 'foo', record_avg: { value: 80 } },
- { key: 'bar', record_avg: { value: 20 } },
+ {
+ latest_score: {
+ top: [
+ {
+ metrics: {
+ record_score: 80,
+ job_id: '1',
+ partition_field_value: 'foo',
+ by_field_value: 'type-foo',
+ },
+ },
+ ],
+ },
+ },
+ {
+ latest_score: {
+ top: [
+ {
+ metrics: {
+ record_score: 20,
+ job_id: '2',
+ partition_field_value: 'bar',
+ by_field_value: 'type-bar',
+ },
+ },
+ ],
+ },
+ },
],
},
},
@@ -286,58 +178,26 @@ describe('Transaction duration anomaly alert', () => {
} as unknown) as MlPluginSetup;
registerTransactionDurationAnomalyAlertType({
- alerting,
+ ...dependencies,
ml,
- config$: mockedConfig$,
});
- expect(alertExecutor).toBeDefined();
- const scheduleActions = jest.fn();
- const services = {
- callCluster: jest.fn(),
- alertInstanceFactory: jest.fn(() => ({ scheduleActions })),
- };
const params = { anomalySeverityType: ANOMALY_SEVERITY.MINOR };
- await alertExecutor!({ services, params });
+ await executor({ params });
+
+ expect(services.alertInstanceFactory).toHaveBeenCalledTimes(1);
- await alertExecutor!({ services, params });
- [
- 'apm.transaction_duration_anomaly_foo_production',
- 'apm.transaction_duration_anomaly_foo_testing',
- 'apm.transaction_duration_anomaly_bar_production',
- 'apm.transaction_duration_anomaly_bar_testing',
- ].forEach((instanceName) =>
- expect(services.alertInstanceFactory).toHaveBeenCalledWith(instanceName)
+ expect(services.alertInstanceFactory).toHaveBeenCalledWith(
+ 'apm.transaction_duration_anomaly_foo_development_type-foo'
);
expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
serviceName: 'foo',
- transactionType: undefined,
- environment: 'production',
- threshold: 'minor',
- thresholdValue: 'critical',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'bar',
- transactionType: undefined,
- environment: 'production',
- threshold: 'minor',
- thresholdValue: 'warning',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'foo',
- transactionType: undefined,
- environment: 'testing',
- threshold: 'minor',
- thresholdValue: 'critical',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'bar',
- transactionType: undefined,
- environment: 'testing',
+ transactionType: 'type-foo',
+ environment: 'development',
threshold: 'minor',
- thresholdValue: 'warning',
+ triggerValue: 'critical',
});
});
});
diff --git a/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.ts b/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.ts
index 84c3ec7325fd2..15f4a8ea07801 100644
--- a/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.ts
+++ b/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.ts
@@ -6,9 +6,16 @@
*/
import { schema } from '@kbn/config-schema';
-import { Observable } from 'rxjs';
-import { isEmpty } from 'lodash';
+import { compact } from 'lodash';
+import { ESSearchResponse } from 'typings/elasticsearch';
+import { QueryContainer } from '@elastic/elasticsearch/api/types';
import { getSeverity } from '../../../common/anomaly_detection';
+import {
+ SERVICE_ENVIRONMENT,
+ SERVICE_NAME,
+ TRANSACTION_TYPE,
+} from '../../../common/elasticsearch_fieldnames';
+import { asMutableArray } from '../../../common/utils/as_mutable_array';
import { ANOMALY_SEVERITY } from '../../../../ml/common';
import { KibanaRequest } from '../../../../../../src/core/server';
import {
@@ -16,17 +23,11 @@ import {
ALERT_TYPES_CONFIG,
ANOMALY_ALERT_SEVERITY_TYPES,
} from '../../../common/alert_types';
-import { AlertingPlugin } from '../../../../alerting/server';
-import { APMConfig } from '../..';
-import { MlPluginSetup } from '../../../../ml/server';
import { getMLJobs } from '../service_map/get_service_anomalies';
import { apmActionVariables } from './action_variables';
-
-interface RegisterAlertParams {
- alerting: AlertingPlugin['setup'];
- ml?: MlPluginSetup;
- config$: Observable;
-}
+import { RegisterRuleDependencies } from './register_apm_alerts';
+import { parseEnvironmentUrlParam } from '../../../common/environment_filter_values';
+import { createAPMLifecycleRuleType } from './create_apm_lifecycle_rule_type';
const paramsSchema = schema.object({
serviceName: schema.maybe(schema.string()),
@@ -46,203 +47,199 @@ const alertTypeConfig =
ALERT_TYPES_CONFIG[AlertType.TransactionDurationAnomaly];
export function registerTransactionDurationAnomalyAlertType({
- alerting,
+ registry,
ml,
- config$,
-}: RegisterAlertParams) {
- alerting.registerType({
- id: AlertType.TransactionDurationAnomaly,
- name: alertTypeConfig.name,
- actionGroups: alertTypeConfig.actionGroups,
- defaultActionGroupId: alertTypeConfig.defaultActionGroupId,
- validate: {
- params: paramsSchema,
- },
- actionVariables: {
- context: [
- apmActionVariables.serviceName,
- apmActionVariables.transactionType,
- apmActionVariables.environment,
- apmActionVariables.threshold,
- apmActionVariables.triggerValue,
- ],
- },
- producer: 'apm',
- minimumLicenseRequired: 'basic',
- executor: async ({ services, params, state }) => {
- if (!ml) {
- return;
- }
- const alertParams = params;
- const request = {} as KibanaRequest;
- const { mlAnomalySearch } = ml.mlSystemProvider(
- request,
- services.savedObjectsClient
- );
- const anomalyDetectors = ml.anomalyDetectorsProvider(
- request,
- services.savedObjectsClient
- );
-
- const mlJobs = await getMLJobs(anomalyDetectors, alertParams.environment);
-
- const selectedOption = ANOMALY_ALERT_SEVERITY_TYPES.find(
- (option) => option.type === alertParams.anomalySeverityType
- );
-
- if (!selectedOption) {
- throw new Error(
- `Anomaly alert severity type ${alertParams.anomalySeverityType} is not supported.`
+ logger,
+}: RegisterRuleDependencies) {
+ registry.registerType(
+ createAPMLifecycleRuleType({
+ id: AlertType.TransactionDurationAnomaly,
+ name: alertTypeConfig.name,
+ actionGroups: alertTypeConfig.actionGroups,
+ defaultActionGroupId: alertTypeConfig.defaultActionGroupId,
+ validate: {
+ params: paramsSchema,
+ },
+ actionVariables: {
+ context: [
+ apmActionVariables.serviceName,
+ apmActionVariables.transactionType,
+ apmActionVariables.environment,
+ apmActionVariables.threshold,
+ apmActionVariables.triggerValue,
+ ],
+ },
+ producer: 'apm',
+ minimumLicenseRequired: 'basic',
+ executor: async ({ services, params }) => {
+ if (!ml) {
+ return {};
+ }
+ const alertParams = params;
+ const request = {} as KibanaRequest;
+ const { mlAnomalySearch } = ml.mlSystemProvider(
+ request,
+ services.savedObjectsClient
+ );
+ const anomalyDetectors = ml.anomalyDetectorsProvider(
+ request,
+ services.savedObjectsClient
);
- }
- const threshold = selectedOption.threshold;
+ const mlJobs = await getMLJobs(
+ anomalyDetectors,
+ alertParams.environment
+ );
- if (mlJobs.length === 0) {
- return {};
- }
-
- const jobIds = mlJobs.map((job) => job.job_id);
- const anomalySearchParams = {
- terminateAfter: 1,
- body: {
- size: 0,
- query: {
- bool: {
- filter: [
- { term: { result_type: 'record' } },
- { terms: { job_id: jobIds } },
- {
- range: {
- timestamp: {
- gte: `now-${alertParams.windowSize}${alertParams.windowUnit}`,
- format: 'epoch_millis',
+ const selectedOption = ANOMALY_ALERT_SEVERITY_TYPES.find(
+ (option) => option.type === alertParams.anomalySeverityType
+ );
+
+ if (!selectedOption) {
+ throw new Error(
+ `Anomaly alert severity type ${alertParams.anomalySeverityType} is not supported.`
+ );
+ }
+
+ const threshold = selectedOption.threshold;
+
+ if (mlJobs.length === 0) {
+ return {};
+ }
+
+ const jobIds = mlJobs.map((job) => job.job_id);
+ const anomalySearchParams = {
+ body: {
+ size: 0,
+ query: {
+ bool: {
+ filter: [
+ { term: { result_type: 'record' } },
+ { terms: { job_id: jobIds } },
+ { term: { is_interim: false } },
+ {
+ range: {
+ timestamp: {
+ gte: `now-${alertParams.windowSize}${alertParams.windowUnit}`,
+ format: 'epoch_millis',
+ },
},
},
- },
- ...(alertParams.serviceName
- ? [
- {
- term: {
- partition_field_value: alertParams.serviceName,
+ ...(alertParams.serviceName
+ ? [
+ {
+ term: {
+ partition_field_value: alertParams.serviceName,
+ },
},
- },
- ]
- : []),
- ...(alertParams.transactionType
- ? [
- {
- term: {
- by_field_value: alertParams.transactionType,
+ ]
+ : []),
+ ...(alertParams.transactionType
+ ? [
+ {
+ term: {
+ by_field_value: alertParams.transactionType,
+ },
},
- },
- ]
- : []),
- {
- range: {
- record_score: {
- gte: threshold,
- },
- },
- },
- ],
- },
- },
- aggs: {
- services: {
- terms: {
- field: 'partition_field_value',
- size: 50,
+ ]
+ : []),
+ ] as QueryContainer[],
},
- aggs: {
- transaction_types: {
- terms: {
- field: 'by_field_value',
- },
+ },
+ aggs: {
+ anomaly_groups: {
+ multi_terms: {
+ terms: [
+ { field: 'partition_field_value' },
+ { field: 'by_field_value' },
+ { field: 'job_id' },
+ ],
+ size: 10000,
},
- record_avg: {
- avg: {
- field: 'record_score',
+ aggs: {
+ latest_score: {
+ top_metrics: {
+ metrics: asMutableArray([
+ { field: 'record_score' },
+ { field: 'partition_field_value' },
+ { field: 'by_field_value' },
+ { field: 'job_id' },
+ ] as const),
+ sort: {
+ '@timestamp': 'desc' as const,
+ },
+ },
},
},
},
},
},
- },
- };
-
- const response = ((await mlAnomalySearch(
- anomalySearchParams,
- jobIds
- )) as unknown) as {
- hits: { total: { value: number } };
- aggregations?: {
- services: {
- buckets: Array<{
- key: string;
- record_avg: { value: number };
- transaction_types: { buckets: Array<{ key: string }> };
- }>;
- };
};
- };
-
- const hitCount = response.hits.total.value;
-
- if (hitCount > 0) {
- function scheduleAction({
- serviceName,
- severity,
- environment,
- transactionType,
- }: {
- serviceName: string;
- severity: string;
- environment?: string;
- transactionType?: string;
- }) {
- const alertInstanceName = [
- AlertType.TransactionDurationAnomaly,
- serviceName,
- environment,
- transactionType,
- ]
- .filter((name) => name)
- .join('_');
-
- const alertInstance = services.alertInstanceFactory(
- alertInstanceName
- );
- alertInstance.scheduleActions(alertTypeConfig.defaultActionGroupId, {
- serviceName,
- environment,
- transactionType,
- threshold: selectedOption?.label,
- thresholdValue: severity,
- });
- }
- mlJobs.map((job) => {
- const environment = job.custom_settings?.job_tags?.environment;
- response.aggregations?.services.buckets.forEach((serviceBucket) => {
- const serviceName = serviceBucket.key as string;
- const severity = getSeverity(serviceBucket.record_avg.value);
- if (isEmpty(serviceBucket.transaction_types?.buckets)) {
- scheduleAction({ serviceName, severity, environment });
- } else {
- serviceBucket.transaction_types?.buckets.forEach((typeBucket) => {
- const transactionType = typeBucket.key as string;
- scheduleAction({
- serviceName,
- severity,
- environment,
- transactionType,
- });
- });
- }
- });
+ const response: ESSearchResponse<
+ unknown,
+ typeof anomalySearchParams
+ > = (await mlAnomalySearch(anomalySearchParams, [])) as any;
+
+ const anomalies =
+ response.aggregations?.anomaly_groups.buckets
+ .map((bucket) => {
+ const latest = bucket.latest_score.top[0].metrics;
+
+ const job = mlJobs.find((j) => j.job_id === latest.job_id);
+
+ if (!job) {
+ logger.warn(
+ `Could not find matching job for job id ${latest.job_id}`
+ );
+ return undefined;
+ }
+
+ return {
+ serviceName: latest.partition_field_value as string,
+ transactionType: latest.by_field_value as string,
+ environment: job.custom_settings!.job_tags!.environment,
+ score: latest.record_score as number,
+ };
+ })
+ .filter((anomaly) =>
+ anomaly ? anomaly.score >= threshold : false
+ ) ?? [];
+
+ compact(anomalies).forEach((anomaly) => {
+ const { serviceName, environment, transactionType, score } = anomaly;
+
+ const parsedEnvironment = parseEnvironmentUrlParam(environment);
+
+ services
+ .alertWithLifecycle({
+ id: [
+ AlertType.TransactionDurationAnomaly,
+ serviceName,
+ environment,
+ transactionType,
+ ]
+ .filter((name) => name)
+ .join('_'),
+ fields: {
+ [SERVICE_NAME]: serviceName,
+ ...(parsedEnvironment.esFieldValue
+ ? { [SERVICE_ENVIRONMENT]: environment }
+ : {}),
+ [TRANSACTION_TYPE]: transactionType,
+ },
+ })
+ .scheduleActions(alertTypeConfig.defaultActionGroupId, {
+ serviceName,
+ transactionType,
+ environment,
+ threshold: selectedOption?.label,
+ triggerValue: getSeverity(score),
+ });
});
- }
- },
- });
+
+ return {};
+ },
+ })
+ );
}
diff --git a/x-pack/plugins/apm/server/lib/alerts/register_transaction_error_rate_alert_type.test.ts b/x-pack/plugins/apm/server/lib/alerts/register_transaction_error_rate_alert_type.test.ts
index 148cd813a8a22..be5f4705482d0 100644
--- a/x-pack/plugins/apm/server/lib/alerts/register_transaction_error_rate_alert_type.test.ts
+++ b/x-pack/plugins/apm/server/lib/alerts/register_transaction_error_rate_alert_type.test.ts
@@ -5,48 +5,19 @@
* 2.0.
*/
-import { Observable } from 'rxjs';
-import * as Rx from 'rxjs';
-import { toArray, map } from 'rxjs/operators';
-import { AlertingPlugin } from '../../../../alerting/server';
-import { APMConfig } from '../..';
import { registerTransactionErrorRateAlertType } from './register_transaction_error_rate_alert_type';
-import { elasticsearchServiceMock } from 'src/core/server/mocks';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { elasticsearchClientMock } from 'src/core/server/elasticsearch/client/mocks';
-
-type Operator = (source: Rx.Observable) => Rx.Observable;
-const pipeClosure = (fn: Operator): Operator => {
- return (source: Rx.Observable) => {
- return Rx.defer(() => fn(source));
- };
-};
-const mockedConfig$ = (Rx.of('apm_oss.errorIndices').pipe(
- pipeClosure((source$) => {
- return source$.pipe(map((i) => i));
- }),
- toArray()
-) as unknown) as Observable;
+import { createRuleTypeMocks } from './test_utils';
describe('Transaction error rate alert', () => {
it("doesn't send an alert when rate is less than threshold", async () => {
- let alertExecutor: any;
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
+ const { services, dependencies, executor } = createRuleTypeMocks();
registerTransactionErrorRateAlertType({
- alerting,
- config$: mockedConfig$,
+ ...dependencies,
});
- expect(alertExecutor).toBeDefined();
- const services = {
- scopedClusterClient: elasticsearchServiceMock.createScopedClusterClient(),
- alertInstanceFactory: jest.fn(),
- };
const params = { threshold: 1 };
services.scopedClusterClient.asCurrentUser.search.mockReturnValue(
@@ -60,6 +31,11 @@ describe('Transaction error rate alert', () => {
},
took: 0,
timed_out: false,
+ aggregations: {
+ series: {
+ buckets: [],
+ },
+ },
_shards: {
failed: 0,
skipped: 0,
@@ -69,30 +45,21 @@ describe('Transaction error rate alert', () => {
})
);
- await alertExecutor!({ services, params });
+ await executor({ params });
expect(services.alertInstanceFactory).not.toBeCalled();
});
- it('sends alerts with service name, transaction type and environment', async () => {
- let alertExecutor: any;
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
+ it('sends alerts for services that exceeded the threshold', async () => {
+ const {
+ services,
+ dependencies,
+ executor,
+ scheduleActions,
+ } = createRuleTypeMocks();
registerTransactionErrorRateAlertType({
- alerting,
- config$: mockedConfig$,
+ ...dependencies,
});
- expect(alertExecutor).toBeDefined();
-
- const scheduleActions = jest.fn();
- const services = {
- scopedClusterClient: elasticsearchServiceMock.createScopedClusterClient(),
- alertInstanceFactory: jest.fn(() => ({ scheduleActions })),
- };
- const params = { threshold: 10, windowSize: 5, windowUnit: 'm' };
services.scopedClusterClient.asCurrentUser.search.mockReturnValue(
elasticsearchClientMock.createSuccessTransportRequestPromise({
@@ -100,37 +67,38 @@ describe('Transaction error rate alert', () => {
hits: [],
total: {
relation: 'eq',
- value: 4,
+ value: 0,
},
},
aggregations: {
- failed_transactions: {
- doc_count: 2,
- },
- services: {
+ series: {
buckets: [
{
- key: 'foo',
- transaction_types: {
+ key: ['foo', 'env-foo', 'type-foo'],
+ outcomes: {
buckets: [
{
- key: 'type-foo',
- environments: {
- buckets: [{ key: 'env-foo' }, { key: 'env-foo-2' }],
- },
+ key: 'success',
+ doc_count: 90,
+ },
+ {
+ key: 'failure',
+ doc_count: 10,
},
],
},
},
{
- key: 'bar',
- transaction_types: {
+ key: ['bar', 'env-bar', 'type-bar'],
+ outcomes: {
buckets: [
{
- key: 'type-bar',
- environments: {
- buckets: [{ key: 'env-bar' }, { key: 'env-bar-2' }],
- },
+ key: 'success',
+ doc_count: 90,
+ },
+ {
+ key: 'failure',
+ doc_count: 1,
},
],
},
@@ -149,208 +117,25 @@ describe('Transaction error rate alert', () => {
})
);
- await alertExecutor!({ services, params });
- [
- 'apm.transaction_error_rate_foo_type-foo_env-foo',
- 'apm.transaction_error_rate_foo_type-foo_env-foo-2',
- 'apm.transaction_error_rate_bar_type-bar_env-bar',
- 'apm.transaction_error_rate_bar_type-bar_env-bar-2',
- ].forEach((instanceName) =>
- expect(services.alertInstanceFactory).toHaveBeenCalledWith(instanceName)
- );
-
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'foo',
- transactionType: 'type-foo',
- environment: 'env-foo',
- threshold: 10,
- triggerValue: '50',
- interval: '5m',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'foo',
- transactionType: 'type-foo',
- environment: 'env-foo-2',
- threshold: 10,
- triggerValue: '50',
- interval: '5m',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'bar',
- transactionType: 'type-bar',
- environment: 'env-bar',
- threshold: 10,
- triggerValue: '50',
- interval: '5m',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'bar',
- transactionType: 'type-bar',
- environment: 'env-bar-2',
- threshold: 10,
- triggerValue: '50',
- interval: '5m',
- });
- });
- it('sends alerts with service name and transaction type', async () => {
- let alertExecutor: any;
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
-
- registerTransactionErrorRateAlertType({
- alerting,
- config$: mockedConfig$,
- });
- expect(alertExecutor).toBeDefined();
-
- const scheduleActions = jest.fn();
- const services = {
- scopedClusterClient: elasticsearchServiceMock.createScopedClusterClient(),
- alertInstanceFactory: jest.fn(() => ({ scheduleActions })),
- };
const params = { threshold: 10, windowSize: 5, windowUnit: 'm' };
- services.scopedClusterClient.asCurrentUser.search.mockReturnValue(
- elasticsearchClientMock.createSuccessTransportRequestPromise({
- hits: {
- hits: [],
- total: {
- relation: 'eq',
- value: 4,
- },
- },
- aggregations: {
- failed_transactions: {
- doc_count: 2,
- },
- services: {
- buckets: [
- {
- key: 'foo',
- transaction_types: {
- buckets: [{ key: 'type-foo' }],
- },
- },
- {
- key: 'bar',
- transaction_types: {
- buckets: [{ key: 'type-bar' }],
- },
- },
- ],
- },
- },
- took: 0,
- timed_out: false,
- _shards: {
- failed: 0,
- skipped: 0,
- successful: 1,
- total: 1,
- },
- })
- );
-
- await alertExecutor!({ services, params });
- [
- 'apm.transaction_error_rate_foo_type-foo',
- 'apm.transaction_error_rate_bar_type-bar',
- ].forEach((instanceName) =>
- expect(services.alertInstanceFactory).toHaveBeenCalledWith(instanceName)
- );
-
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'foo',
- transactionType: 'type-foo',
- environment: undefined,
- threshold: 10,
- triggerValue: '50',
- interval: '5m',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'bar',
- transactionType: 'type-bar',
- environment: undefined,
- threshold: 10,
- triggerValue: '50',
- interval: '5m',
- });
- });
-
- it('sends alerts with service name', async () => {
- let alertExecutor: any;
- const alerting = {
- registerType: ({ executor }) => {
- alertExecutor = executor;
- },
- } as AlertingPlugin['setup'];
-
- registerTransactionErrorRateAlertType({
- alerting,
- config$: mockedConfig$,
- });
- expect(alertExecutor).toBeDefined();
+ await executor({ params });
- const scheduleActions = jest.fn();
- const services = {
- scopedClusterClient: elasticsearchServiceMock.createScopedClusterClient(),
- alertInstanceFactory: jest.fn(() => ({ scheduleActions })),
- };
- const params = { threshold: 10, windowSize: 5, windowUnit: 'm' };
+ expect(services.alertInstanceFactory).toHaveBeenCalledTimes(1);
- services.scopedClusterClient.asCurrentUser.search.mockReturnValue(
- elasticsearchClientMock.createSuccessTransportRequestPromise({
- hits: {
- hits: [],
- total: {
- value: 4,
- relation: 'eq',
- },
- },
- aggregations: {
- failed_transactions: {
- doc_count: 2,
- },
- services: {
- buckets: [{ key: 'foo' }, { key: 'bar' }],
- },
- },
- took: 0,
- timed_out: false,
- _shards: {
- failed: 0,
- skipped: 0,
- successful: 1,
- total: 1,
- },
- })
+ expect(services.alertInstanceFactory).toHaveBeenCalledWith(
+ 'apm.transaction_error_rate_foo_type-foo_env-foo'
);
-
- await alertExecutor!({ services, params });
- [
- 'apm.transaction_error_rate_foo',
- 'apm.transaction_error_rate_bar',
- ].forEach((instanceName) =>
- expect(services.alertInstanceFactory).toHaveBeenCalledWith(instanceName)
+ expect(services.alertInstanceFactory).not.toHaveBeenCalledWith(
+ 'apm.transaction_error_rate_bar_type-bar_env-bar'
);
expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
serviceName: 'foo',
- transactionType: undefined,
- environment: undefined,
- threshold: 10,
- triggerValue: '50',
- interval: '5m',
- });
- expect(scheduleActions).toHaveBeenCalledWith('threshold_met', {
- serviceName: 'bar',
- transactionType: undefined,
- environment: undefined,
+ transactionType: 'type-foo',
+ environment: 'env-foo',
threshold: 10,
- triggerValue: '50',
+ triggerValue: '10',
interval: '5m',
});
});
diff --git a/x-pack/plugins/apm/server/lib/alerts/register_transaction_error_rate_alert_type.ts b/x-pack/plugins/apm/server/lib/alerts/register_transaction_error_rate_alert_type.ts
index 0b2684cdaf083..0865bed41142e 100644
--- a/x-pack/plugins/apm/server/lib/alerts/register_transaction_error_rate_alert_type.ts
+++ b/x-pack/plugins/apm/server/lib/alerts/register_transaction_error_rate_alert_type.ts
@@ -6,11 +6,7 @@
*/
import { schema } from '@kbn/config-schema';
-import { isEmpty } from 'lodash';
-import { Observable } from 'rxjs';
import { take } from 'rxjs/operators';
-import { APMConfig } from '../..';
-import { AlertingPlugin } from '../../../../alerting/server';
import { AlertType, ALERT_TYPES_CONFIG } from '../../../common/alert_types';
import {
EVENT_OUTCOME,
@@ -26,11 +22,8 @@ import { environmentQuery } from '../../../server/utils/queries';
import { getApmIndices } from '../settings/apm_indices/get_apm_indices';
import { apmActionVariables } from './action_variables';
import { alertingEsClient } from './alerting_es_client';
-
-interface RegisterAlertParams {
- alerting: AlertingPlugin['setup'];
- config$: Observable;
-}
+import { createAPMLifecycleRuleType } from './create_apm_lifecycle_rule_type';
+import { RegisterRuleDependencies } from './register_apm_alerts';
const paramsSchema = schema.object({
windowSize: schema.number(),
@@ -44,158 +37,165 @@ const paramsSchema = schema.object({
const alertTypeConfig = ALERT_TYPES_CONFIG[AlertType.TransactionErrorRate];
export function registerTransactionErrorRateAlertType({
- alerting,
+ registry,
config$,
-}: RegisterAlertParams) {
- alerting.registerType({
- id: AlertType.TransactionErrorRate,
- name: alertTypeConfig.name,
- actionGroups: alertTypeConfig.actionGroups,
- defaultActionGroupId: alertTypeConfig.defaultActionGroupId,
- validate: {
- params: paramsSchema,
- },
- actionVariables: {
- context: [
- apmActionVariables.transactionType,
- apmActionVariables.serviceName,
- apmActionVariables.environment,
- apmActionVariables.threshold,
- apmActionVariables.triggerValue,
- apmActionVariables.interval,
- ],
- },
- producer: 'apm',
- minimumLicenseRequired: 'basic',
- executor: async ({ services, params: alertParams }) => {
- const config = await config$.pipe(take(1)).toPromise();
- const indices = await getApmIndices({
- config,
- savedObjectsClient: services.savedObjectsClient,
- });
- const maxServiceEnvironments = config['xpack.apm.maxServiceEnvironments'];
+}: RegisterRuleDependencies) {
+ registry.registerType(
+ createAPMLifecycleRuleType({
+ id: AlertType.TransactionErrorRate,
+ name: alertTypeConfig.name,
+ actionGroups: alertTypeConfig.actionGroups,
+ defaultActionGroupId: alertTypeConfig.defaultActionGroupId,
+ validate: {
+ params: paramsSchema,
+ },
+ actionVariables: {
+ context: [
+ apmActionVariables.transactionType,
+ apmActionVariables.serviceName,
+ apmActionVariables.environment,
+ apmActionVariables.threshold,
+ apmActionVariables.triggerValue,
+ apmActionVariables.interval,
+ ],
+ },
+ producer: 'apm',
+ minimumLicenseRequired: 'basic',
+ executor: async ({ services, params: alertParams }) => {
+ const config = await config$.pipe(take(1)).toPromise();
+ const indices = await getApmIndices({
+ config,
+ savedObjectsClient: services.savedObjectsClient,
+ });
- const searchParams = {
- index: indices['apm_oss.transactionIndices'],
- size: 0,
- body: {
- track_total_hits: true,
- query: {
- bool: {
- filter: [
- {
- range: {
- '@timestamp': {
- gte: `now-${alertParams.windowSize}${alertParams.windowUnit}`,
+ const searchParams = {
+ index: indices['apm_oss.transactionIndices'],
+ size: 1,
+ body: {
+ query: {
+ bool: {
+ filter: [
+ {
+ range: {
+ '@timestamp': {
+ gte: `now-${alertParams.windowSize}${alertParams.windowUnit}`,
+ },
},
},
- },
- { term: { [PROCESSOR_EVENT]: ProcessorEvent.transaction } },
- ...(alertParams.serviceName
- ? [{ term: { [SERVICE_NAME]: alertParams.serviceName } }]
- : []),
- ...(alertParams.transactionType
- ? [
- {
- term: {
- [TRANSACTION_TYPE]: alertParams.transactionType,
+ { term: { [PROCESSOR_EVENT]: ProcessorEvent.transaction } },
+ {
+ terms: {
+ [EVENT_OUTCOME]: [
+ EventOutcome.failure,
+ EventOutcome.success,
+ ],
+ },
+ },
+ ...(alertParams.serviceName
+ ? [{ term: { [SERVICE_NAME]: alertParams.serviceName } }]
+ : []),
+ ...(alertParams.transactionType
+ ? [
+ {
+ term: {
+ [TRANSACTION_TYPE]: alertParams.transactionType,
+ },
},
- },
- ]
- : []),
- ...environmentQuery(alertParams.environment),
- ],
- },
- },
- aggs: {
- failed_transactions: {
- filter: { term: { [EVENT_OUTCOME]: EventOutcome.failure } },
- },
- services: {
- terms: {
- field: SERVICE_NAME,
- size: 50,
+ ]
+ : []),
+ ...environmentQuery(alertParams.environment),
+ ],
},
- aggs: {
- transaction_types: {
- terms: { field: TRANSACTION_TYPE },
- aggs: {
- environments: {
- terms: {
- field: SERVICE_ENVIRONMENT,
- size: maxServiceEnvironments,
- },
+ },
+ aggs: {
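+                // Bucket the hits by service name, environment, and transaction type; each combination is evaluated as its own series.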
+ series: {
+ multi_terms: {
+ terms: [
+ { field: SERVICE_NAME },
+ { field: SERVICE_ENVIRONMENT, missing: '' },
+ { field: TRANSACTION_TYPE },
+ ],
+ size: 10000,
+ },
+ aggs: {
+ outcomes: {
+ terms: {
+ field: EVENT_OUTCOME,
},
},
},
},
},
},
- },
- };
+ };
- const { body: response } = await alertingEsClient(services, searchParams);
- if (!response.aggregations) {
- return;
- }
+ const response = await alertingEsClient(
+ services.scopedClusterClient,
+ searchParams
+ );
- const failedTransactionCount =
- response.aggregations.failed_transactions.doc_count;
- const totalTransactionCount = response.hits.total.value;
- const transactionErrorRate =
- (failedTransactionCount / totalTransactionCount) * 100;
+ if (!response.aggregations) {
+ return {};
+ }
- if (transactionErrorRate > alertParams.threshold) {
- function scheduleAction({
- serviceName,
- environment,
- transactionType,
- }: {
- serviceName: string;
- environment?: string;
- transactionType?: string;
- }) {
- const alertInstanceName = [
- AlertType.TransactionErrorRate,
- serviceName,
- transactionType,
- environment,
- ]
- .filter((name) => name)
- .join('_');
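+          // Compute the error rate per series from the outcome counts and keep only series at or above the configured threshold.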
+ const results = response.aggregations.series.buckets
+ .map((bucket) => {
+ const [serviceName, environment, transactionType] = bucket.key;
+
+ const failed =
+ bucket.outcomes.buckets.find(
+ (outcomeBucket) => outcomeBucket.key === EventOutcome.failure
+ )?.doc_count ?? 0;
+              const successful =
+ bucket.outcomes.buckets.find(
+ (outcomeBucket) => outcomeBucket.key === EventOutcome.success
+ )?.doc_count ?? 0;
- const alertInstance = services.alertInstanceFactory(
- alertInstanceName
- );
- alertInstance.scheduleActions(alertTypeConfig.defaultActionGroupId, {
+ return {
+ serviceName,
+ environment,
+ transactionType,
+                errorRate: (failed / (failed + successful)) * 100,
+ };
+ })
+ .filter((result) => result.errorRate >= alertParams.threshold);
+
+ results.forEach((result) => {
+ const {
serviceName,
- transactionType,
environment,
- threshold: alertParams.threshold,
- triggerValue: asDecimalOrInteger(transactionErrorRate),
- interval: `${alertParams.windowSize}${alertParams.windowUnit}`,
- });
- }
+ transactionType,
+ errorRate,
+ } = result;
- response.aggregations?.services.buckets.forEach((serviceBucket) => {
- const serviceName = serviceBucket.key as string;
- if (isEmpty(serviceBucket.transaction_types?.buckets)) {
- scheduleAction({ serviceName });
- } else {
- serviceBucket.transaction_types.buckets.forEach((typeBucket) => {
- const transactionType = typeBucket.key as string;
- if (isEmpty(typeBucket.environments?.buckets)) {
- scheduleAction({ serviceName, transactionType });
- } else {
- typeBucket.environments.buckets.forEach((envBucket) => {
- const environment = envBucket.key as string;
- scheduleAction({ serviceName, transactionType, environment });
- });
- }
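+            // Create a lifecycle alert per breaching series and schedule the default action with the computed values.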
+ services
+ .alertWithLifecycle({
+ id: [
+ AlertType.TransactionErrorRate,
+ serviceName,
+ transactionType,
+ environment,
+ ]
+ .filter((name) => name)
+ .join('_'),
+ fields: {
+ [SERVICE_NAME]: serviceName,
+ ...(environment ? { [SERVICE_ENVIRONMENT]: environment } : {}),
+ [TRANSACTION_TYPE]: transactionType,
+ },
+ })
+ .scheduleActions(alertTypeConfig.defaultActionGroupId, {
+ serviceName,
+ transactionType,
+ environment,
+ threshold: alertParams.threshold,
+ triggerValue: asDecimalOrInteger(errorRate),
+ interval: `${alertParams.windowSize}${alertParams.windowUnit}`,
});
- }
});
- }
- },
- });
+
+ return {};
+ },
+ })
+ );
}
diff --git a/x-pack/plugins/apm/server/lib/alerts/test_utils/index.ts b/x-pack/plugins/apm/server/lib/alerts/test_utils/index.ts
new file mode 100644
index 0000000000000..37b3e282d0a59
--- /dev/null
+++ b/x-pack/plugins/apm/server/lib/alerts/test_utils/index.ts
@@ -0,0 +1,64 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { Logger } from 'kibana/server';
+import { of } from 'rxjs';
+import { elasticsearchServiceMock } from 'src/core/server/mocks';
+import { APMConfig } from '../../..';
+import { APMRuleRegistry } from '../../../plugin';
+
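+// Test helpers for exercising APM rule type executors with mocked services and config.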
+export const createRuleTypeMocks = () => {
+  let alertExecutor: (...args: any[]) => Promise<Record<string, any>>;
+
+ const mockedConfig$ = of({
+ /* eslint-disable @typescript-eslint/naming-convention */
+ 'apm_oss.errorIndices': 'apm-*',
+ 'apm_oss.transactionIndices': 'apm-*',
+ /* eslint-enable @typescript-eslint/naming-convention */
+ } as APMConfig);
+
+ const loggerMock = ({
+ debug: jest.fn(),
+ warn: jest.fn(),
+ error: jest.fn(),
+ } as unknown) as Logger;
+
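+  // Minimal registry mock that captures the executor passed to registerType so tests can call it directly.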
+ const registry = {
+ registerType: ({ executor }) => {
+ alertExecutor = executor;
+ },
+ } as APMRuleRegistry;
+
+ const scheduleActions = jest.fn();
+
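+  // Stubbed executor services handed to the rule executor under test.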
+ const services = {
+ scopedClusterClient: elasticsearchServiceMock.createScopedClusterClient(),
+ scopedRuleRegistryClient: {
+ bulkIndex: jest.fn(),
+ },
+ alertInstanceFactory: jest.fn(() => ({ scheduleActions })),
+ alertWithLifecycle: jest.fn(),
+ logger: loggerMock,
+ };
+
+ return {
+ dependencies: {
+ registry,
+ config$: mockedConfig$,
+ logger: loggerMock,
+ },
+ services,
+ scheduleActions,
+    executor: async ({ params }: { params: Record<string, any> }) => {
+ return alertExecutor({
+ services,
+ params,
+ startedAt: new Date(),
+ });
+ },
+ };
+};
diff --git a/x-pack/plugins/apm/server/lib/helpers/create_es_client/call_async_with_debug.ts b/x-pack/plugins/apm/server/lib/helpers/create_es_client/call_async_with_debug.ts
index 1f0aa401bcab0..989297544c78f 100644
--- a/x-pack/plugins/apm/server/lib/helpers/create_es_client/call_async_with_debug.ts
+++ b/x-pack/plugins/apm/server/lib/helpers/create_es_client/call_async_with_debug.ts
@@ -10,7 +10,7 @@
import { omit } from 'lodash';
import chalk from 'chalk';
import { KibanaRequest } from '../../../../../../../src/core/server';
-import { inspectableEsQueriesMap } from '../../../routes/create_api';
+import { inspectableEsQueriesMap } from '../../../routes/register_routes';
function formatObj(obj: Record<string, any>) {
return JSON.stringify(obj, null, 2);
diff --git a/x-pack/plugins/apm/server/lib/helpers/create_es_client/create_internal_es_client/index.ts b/x-pack/plugins/apm/server/lib/helpers/create_es_client/create_internal_es_client/index.ts
index 45e17c1678518..9d7434d127ead 100644
--- a/x-pack/plugins/apm/server/lib/helpers/create_es_client/create_internal_es_client/index.ts
+++ b/x-pack/plugins/apm/server/lib/helpers/create_es_client/create_internal_es_client/index.ts
@@ -5,7 +5,6 @@
* 2.0.
*/
-import { KibanaRequest } from 'src/core/server';
import { TransportRequestPromise } from '@elastic/elasticsearch/lib/Transport';
import {
CreateIndexRequest,
@@ -13,7 +12,7 @@ import {
IndexRequest,
} from '@elastic/elasticsearch/api/types';
import { unwrapEsResponse } from '../../../../../../observability/server';
-import { APMRequestHandlerContext } from '../../../../routes/typings';
+import { APMRouteHandlerResources } from '../../../../routes/typings';
import {
ESSearchResponse,
ESSearchRequest,
@@ -31,11 +30,9 @@ export type APMInternalClient = ReturnType<typeof createInternalESClient>;
export function createInternalESClient({
context,
+ debug,
request,
-}: {
- context: APMRequestHandlerContext;
- request: KibanaRequest;
-}) {
+}: Pick<APMRouteHandlerResources, 'context' | 'request'> & { debug: boolean }) {
const { asInternalUser } = context.core.elasticsearch.client;
function callEs