diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index a64ab63494b35..61369a37ec3c2 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -177,6 +177,8 @@
/x-pack/test/functional/services/ml/ @elastic/ml-ui
/x-pack/test/functional_basic/apps/ml/ @elastic/ml-ui
/x-pack/test/functional_with_es_ssl/apps/ml/ @elastic/ml-ui
+/x-pack/test/alerting_api_integration/spaces_only/tests/alerting/ml_rule_types/ @elastic/ml-ui
+/x-pack/test/alerting_api_integration/spaces_only/tests/alerting/transform_rule_types/ @elastic/ml-ui
# ML team owns and maintains the transform plugin despite it living in the Data management section.
/x-pack/plugins/transform/ @elastic/ml-ui
diff --git a/NOTICE.txt b/NOTICE.txt
index 4ede43610ca7b..1694193892e16 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -295,7 +295,7 @@ MIT License http://www.opensource.org/licenses/mit-license
---
This product includes code that is adapted from mapbox-gl-js, which is
available under a "BSD-3-Clause" license.
-https://github.com/mapbox/mapbox-gl-js/blob/master/src/util/image.js
+https://github.com/mapbox/mapbox-gl-js/blob/v1.13.2/src/util/image.js
Copyright (c) 2016, Mapbox
diff --git a/docs/developer/getting-started/debugging.asciidoc b/docs/developer/getting-started/debugging.asciidoc
index f3308a1267386..1254462d2e4ea 100644
--- a/docs/developer/getting-started/debugging.asciidoc
+++ b/docs/developer/getting-started/debugging.asciidoc
@@ -130,71 +130,3 @@ Once you're finished, you can stop Kibana normally, then stop the {es} and APM s
----
./scripts/compose.py stop
----
-
-=== Using {kib} server logs
-{kib} Logs is a great way to see what's going on in your application and to debug performance issues. Navigating through a large number of generated logs can be overwhelming, and following are some techniques that you can use to optimize the process.
-
-Start by defining a problem area that you are interested in. For example, you might be interested in seeing how a particular {kib} Plugin is performing, so no need to gather logs for all of {kib}. Or you might want to focus on a particular feature, such as requests from the {kib} server to the {es} server.
-Depending on your needs, you can configure {kib} to generate logs for a specific feature.
-[source,yml]
-----
-logging:
- appenders:
- file:
- type: file
- fileName: ./kibana.log
- layout:
- type: json
-
-### gather all the Kibana logs into a file
-logging.root:
- appenders: [file]
- level: all
-
-### or gather a subset of the logs
-logging.loggers:
- ### responses to an HTTP request
- - name: http.server.response
- level: debug
- appenders: [file]
- ### result of a query to the Elasticsearch server
- - name: elasticsearch.query
- level: debug
- appenders: [file]
- ### logs generated by my plugin
- - name: plugins.myPlugin
- level: debug
- appenders: [file]
-----
-WARNING: Kibana's `file` appender is configured to produce logs in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format. It's the only format that includes the meta information necessary for https://www.elastic.co/guide/en/apm/agent/nodejs/current/log-correlation.html[log correlation] out-of-the-box.
-
-The next step is to define what https://www.elastic.co/observability[observability tools] are available.
-For a better experience, set up an https://www.elastic.co/guide/en/apm/get-started/current/observability-integrations.html[Observability integration] provided by Elastic to debug your application with the <>
-To debug something quickly without setting up additional tooling, you can work with <>
-
-[[debugging-logs-apm-ui]]
-==== APM UI
-*Prerequisites* {kib} logs are configured to be in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format to include tracing identifiers.
-
-To debug {kib} with the APM UI, you must set up the APM infrastructure. You can find instructions for the setup process
-https://www.elastic.co/guide/en/apm/get-started/current/observability-integrations.html[on the Observability integrations page].
-
-Once you set up the APM infrastructure, you can enable the APM agent and put {kib} under load to collect APM events. To analyze the collected metrics and logs, use the APM UI as demonstrated https://www.elastic.co/guide/en/kibana/master/transactions.html#transaction-trace-sample[in the docs].
-
-[[plain-kibana-logs]]
-==== Plain {kib} logs
-*Prerequisites* {kib} logs are configured to be in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format to include tracing identifiers.
-
-Open {kib} Logs and search for an operation you are interested in.
-For example, suppose you want to investigate the response times for queries to the `/api/telemetry/v2/clusters/_stats` {kib} endpoint.
-Open Kibana Logs and search for the HTTP server response for the endpoint. It looks similar to the following (some fields are omitted for brevity).
-[source,json]
-----
-{
- "message":"POST /api/telemetry/v2/clusters/_stats 200 1014ms - 43.2KB",
- "log":{"level":"DEBUG","logger":"http.server.response"},
- "trace":{"id":"9b99131a6f66587971ef085ef97dfd07"},
- "transaction":{"id":"d0c5bbf14f5febca"}
-}
-----
-You are interested in the https://www.elastic.co/guide/en/ecs/current/ecs-tracing.html#field-trace-id[trace.id] field, which is a unique identifier of a trace. The `trace.id` provides a way to group multiple events, like transactions, which belong together. You can search for `"trace":{"id":"9b99131a6f66587971ef085ef97dfd07"}` to get all the logs that belong to the same trace. This enables you to see how many {es} requests were triggered during the `9b99131a6f66587971ef085ef97dfd07` trace, what they looked like, what {es} endpoints were hit, and so on.
diff --git a/docs/developer/plugin-list.asciidoc b/docs/developer/plugin-list.asciidoc
index e997c0bc68cde..3d9de2d35b500 100644
--- a/docs/developer/plugin-list.asciidoc
+++ b/docs/developer/plugin-list.asciidoc
@@ -540,6 +540,11 @@ Elastic.
|Add tagging capability to saved objects
+|{kib-repo}blob/{branch}/x-pack/plugins/screenshotting/README.md[screenshotting]
+|This plugin provides functionality to take screenshots of the Kibana pages.
+It uses Chromium and Puppeteer underneath to run the browser in headless mode.
+
+
|{kib-repo}blob/{branch}/x-pack/plugins/searchprofiler/README.md[searchprofiler]
|The search profiler consumes the Profile API
by sending a search API with profile: true enabled in the request body. The response contains
diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
index 403d8594999a7..63c29df44019d 100644
--- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
+++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
@@ -88,6 +88,7 @@ readonly links: {
readonly usersAccess: string;
};
readonly workplaceSearch: {
+ readonly apiKeys: string;
readonly box: string;
readonly confluenceCloud: string;
readonly confluenceServer: string;
@@ -289,7 +290,14 @@ readonly links: {
}>;
readonly watcher: Record;
readonly ccs: Record;
- readonly plugins: Record;
+ readonly plugins: {
+ azureRepo: string;
+ gcsRepo: string;
+ hdfsRepo: string;
+ s3Repo: string;
+ snapshotRestoreRepos: string;
+ mapperSize: string;
+ };
readonly snapshotRestore: Record;
readonly ingest: Record;
readonly fleet: Readonly<{
diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
index 131d4452c980c..a9828f04672e9 100644
--- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
+++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
@@ -17,5 +17,5 @@ export interface DocLinksStart
| --- | --- | --- |
| [DOC\_LINK\_VERSION](./kibana-plugin-core-public.doclinksstart.doc_link_version.md) | string | |
| [ELASTIC\_WEBSITE\_URL](./kibana-plugin-core-public.doclinksstart.elastic_website_url.md) | string | |
-| [links](./kibana-plugin-core-public.doclinksstart.links.md) | { readonly settings: string; readonly elasticStackGetStarted: string; readonly upgrade: { readonly upgradingElasticStack: string; }; readonly apm: { readonly kibanaSettings: string; readonly supportedServiceMaps: string; readonly customLinks: string; readonly droppedTransactionSpans: string; readonly upgrading: string; readonly metaData: string; }; readonly canvas: { readonly guide: string; }; readonly cloud: { readonly indexManagement: string; }; readonly dashboard: { readonly guide: string; readonly drilldowns: string; readonly drilldownsTriggerPicker: string; readonly urlDrilldownTemplateSyntax: string; readonly urlDrilldownVariables: string; }; readonly discover: Record<string, string>; readonly filebeat: { readonly base: string; readonly installation: string; readonly configuration: string; readonly elasticsearchOutput: string; readonly elasticsearchModule: string; readonly startup: string; readonly exportedFields: string; readonly suricataModule: string; readonly zeekModule: string; }; readonly auditbeat: { readonly base: string; readonly auditdModule: string; readonly systemModule: string; }; readonly metricbeat: { readonly base: string; readonly configure: string; readonly httpEndpoint: string; readonly install: string; readonly start: string; }; readonly appSearch: { readonly apiRef: string; readonly apiClients: string; readonly apiKeys: string; readonly authentication: string; readonly crawlRules: string; readonly curations: string; readonly duplicateDocuments: string; readonly entryPoints: string; readonly guide: string; readonly indexingDocuments: string; readonly indexingDocumentsSchema: string; readonly logSettings: string; readonly metaEngines: string; readonly precisionTuning: string; readonly relevanceTuning: string; readonly resultSettings: string; readonly searchUI: string; readonly security: string; readonly synonyms: string; readonly webCrawler: string; readonly 
webCrawlerEventLogs: string; }; readonly enterpriseSearch: { readonly configuration: string; readonly licenseManagement: string; readonly mailService: string; readonly usersAccess: string; }; readonly workplaceSearch: { readonly box: string; readonly confluenceCloud: string; readonly confluenceServer: string; readonly customSources: string; readonly customSourcePermissions: string; readonly documentPermissions: string; readonly dropbox: string; readonly externalIdentities: string; readonly gitHub: string; readonly gettingStarted: string; readonly gmail: string; readonly googleDrive: string; readonly indexingSchedule: string; readonly jiraCloud: string; readonly jiraServer: string; readonly oneDrive: string; readonly permissions: string; readonly salesforce: string; readonly security: string; readonly serviceNow: string; readonly sharePoint: string; readonly slack: string; readonly synch: string; readonly zendesk: string; }; readonly heartbeat: { readonly base: string; }; readonly libbeat: { readonly getStarted: string; }; readonly logstash: { readonly base: string; }; readonly functionbeat: { readonly base: string; }; readonly winlogbeat: { readonly base: string; }; readonly aggs: { readonly composite: string; readonly composite\_missing\_bucket: string; readonly date\_histogram: string; readonly date\_range: string; readonly date\_format\_pattern: string; readonly filter: string; readonly filters: string; readonly geohash\_grid: string; readonly histogram: string; readonly ip\_range: string; readonly range: string; readonly significant\_terms: string; readonly terms: string; readonly terms\_doc\_count\_error: string; readonly avg: string; readonly avg\_bucket: string; readonly max\_bucket: string; readonly min\_bucket: string; readonly sum\_bucket: string; readonly cardinality: string; readonly count: string; readonly cumulative\_sum: string; readonly derivative: string; readonly geo\_bounds: string; readonly geo\_centroid: string; readonly max: string; readonly 
median: string; readonly min: string; readonly moving\_avg: string; readonly percentile\_ranks: string; readonly serial\_diff: string; readonly std\_dev: string; readonly sum: string; readonly top\_hits: string; }; readonly runtimeFields: { readonly overview: string; readonly mapping: string; }; readonly scriptedFields: { readonly scriptFields: string; readonly scriptAggs: string; readonly painless: string; readonly painlessApi: string; readonly painlessLangSpec: string; readonly painlessSyntax: string; readonly painlessWalkthrough: string; readonly luceneExpressions: string; }; readonly search: { readonly sessions: string; readonly sessionLimits: string; }; readonly indexPatterns: { readonly introduction: string; readonly fieldFormattersNumber: string; readonly fieldFormattersString: string; readonly runtimeFields: string; }; readonly addData: string; readonly kibana: string; readonly upgradeAssistant: { readonly overview: string; readonly batchReindex: string; readonly remoteReindex: string; }; readonly rollupJobs: string; readonly elasticsearch: Record<string, string>; readonly siem: { readonly privileges: string; readonly guide: string; readonly gettingStarted: string; readonly ml: string; readonly ruleChangeLog: string; readonly detectionsReq: string; readonly networkMap: string; readonly troubleshootGaps: string; }; readonly securitySolution: { readonly trustedApps: string; }; readonly query: { readonly eql: string; readonly kueryQuerySyntax: string; readonly luceneQuerySyntax: string; readonly percolate: string; readonly queryDsl: string; }; readonly date: { readonly dateMath: string; readonly dateMathIndexNames: string; }; readonly management: Record<string, string>; readonly ml: Record<string, string>; readonly transforms: Record<string, string>; readonly visualize: Record<string, string>; readonly apis: Readonly<{ bulkIndexAlias: string; byteSizeUnits: string; createAutoFollowPattern: string; createFollower: string; createIndex: string; 
createSnapshotLifecyclePolicy: string; createRoleMapping: string; createRoleMappingTemplates: string; createRollupJobsRequest: string; createApiKey: string; createPipeline: string; createTransformRequest: string; cronExpressions: string; executeWatchActionModes: string; indexExists: string; openIndex: string; putComponentTemplate: string; painlessExecute: string; painlessExecuteAPIContexts: string; putComponentTemplateMetadata: string; putSnapshotLifecyclePolicy: string; putIndexTemplateV1: string; putWatch: string; simulatePipeline: string; timeUnits: string; updateTransform: string; }>; readonly observability: Readonly<{ guide: string; infrastructureThreshold: string; logsThreshold: string; metricsThreshold: string; monitorStatus: string; monitorUptime: string; tlsCertificate: string; uptimeDurationAnomaly: string; }>; readonly alerting: Record<string, string>; readonly maps: Readonly<{ guide: string; importGeospatialPrivileges: string; gdalTutorial: string; }>; readonly monitoring: Record<string, string>; readonly security: Readonly<{ apiKeyServiceSettings: string; clusterPrivileges: string; elasticsearchSettings: string; elasticsearchEnableSecurity: string; elasticsearchEnableApiKeys: string; indicesPrivileges: string; kibanaTLS: string; kibanaPrivileges: string; mappingRoles: string; mappingRolesFieldRules: string; runAsPrivilege: string; }>; readonly spaces: Readonly<{ kibanaLegacyUrlAliases: string; kibanaDisableLegacyUrlAliasesApi: string; }>; readonly watcher: Record<string, string>; readonly ccs: Record<string, string>; readonly plugins: Record<string, string>; readonly snapshotRestore: Record<string, string>; readonly ingest: Record<string, string>; readonly fleet: Readonly<{ beatsAgentComparison: string; guide: string; fleetServer: string; fleetServerAddFleetServer: string; settings: string; settingsFleetServerHostSettings: string; settingsFleetServerProxySettings: string; troubleshooting: string; elasticAgent: string; datastreams: string; 
datastreamsNamingScheme: string; installElasticAgent: string; installElasticAgentStandalone: string; upgradeElasticAgent: string; upgradeElasticAgent712lower: string; learnMoreBlog: string; apiKeysLearnMore: string; onPremRegistry: string; }>; readonly ecs: { readonly guide: string; }; readonly clients: { readonly guide: string; readonly goOverview: string; readonly javaIndex: string; readonly jsIntro: string; readonly netGuide: string; readonly perlGuide: string; readonly phpGuide: string; readonly pythonGuide: string; readonly rubyOverview: string; readonly rustGuide: string; }; readonly endpoints: { readonly troubleshooting: string; }; } | |
+| [links](./kibana-plugin-core-public.doclinksstart.links.md) | { readonly settings: string; readonly elasticStackGetStarted: string; readonly upgrade: { readonly upgradingElasticStack: string; }; readonly apm: { readonly kibanaSettings: string; readonly supportedServiceMaps: string; readonly customLinks: string; readonly droppedTransactionSpans: string; readonly upgrading: string; readonly metaData: string; }; readonly canvas: { readonly guide: string; }; readonly cloud: { readonly indexManagement: string; }; readonly dashboard: { readonly guide: string; readonly drilldowns: string; readonly drilldownsTriggerPicker: string; readonly urlDrilldownTemplateSyntax: string; readonly urlDrilldownVariables: string; }; readonly discover: Record<string, string>; readonly filebeat: { readonly base: string; readonly installation: string; readonly configuration: string; readonly elasticsearchOutput: string; readonly elasticsearchModule: string; readonly startup: string; readonly exportedFields: string; readonly suricataModule: string; readonly zeekModule: string; }; readonly auditbeat: { readonly base: string; readonly auditdModule: string; readonly systemModule: string; }; readonly metricbeat: { readonly base: string; readonly configure: string; readonly httpEndpoint: string; readonly install: string; readonly start: string; }; readonly appSearch: { readonly apiRef: string; readonly apiClients: string; readonly apiKeys: string; readonly authentication: string; readonly crawlRules: string; readonly curations: string; readonly duplicateDocuments: string; readonly entryPoints: string; readonly guide: string; readonly indexingDocuments: string; readonly indexingDocumentsSchema: string; readonly logSettings: string; readonly metaEngines: string; readonly precisionTuning: string; readonly relevanceTuning: string; readonly resultSettings: string; readonly searchUI: string; readonly security: string; readonly synonyms: string; readonly webCrawler: string; readonly 
webCrawlerEventLogs: string; }; readonly enterpriseSearch: { readonly configuration: string; readonly licenseManagement: string; readonly mailService: string; readonly usersAccess: string; }; readonly workplaceSearch: { readonly apiKeys: string; readonly box: string; readonly confluenceCloud: string; readonly confluenceServer: string; readonly customSources: string; readonly customSourcePermissions: string; readonly documentPermissions: string; readonly dropbox: string; readonly externalIdentities: string; readonly gitHub: string; readonly gettingStarted: string; readonly gmail: string; readonly googleDrive: string; readonly indexingSchedule: string; readonly jiraCloud: string; readonly jiraServer: string; readonly oneDrive: string; readonly permissions: string; readonly salesforce: string; readonly security: string; readonly serviceNow: string; readonly sharePoint: string; readonly slack: string; readonly synch: string; readonly zendesk: string; }; readonly heartbeat: { readonly base: string; }; readonly libbeat: { readonly getStarted: string; }; readonly logstash: { readonly base: string; }; readonly functionbeat: { readonly base: string; }; readonly winlogbeat: { readonly base: string; }; readonly aggs: { readonly composite: string; readonly composite\_missing\_bucket: string; readonly date\_histogram: string; readonly date\_range: string; readonly date\_format\_pattern: string; readonly filter: string; readonly filters: string; readonly geohash\_grid: string; readonly histogram: string; readonly ip\_range: string; readonly range: string; readonly significant\_terms: string; readonly terms: string; readonly terms\_doc\_count\_error: string; readonly avg: string; readonly avg\_bucket: string; readonly max\_bucket: string; readonly min\_bucket: string; readonly sum\_bucket: string; readonly cardinality: string; readonly count: string; readonly cumulative\_sum: string; readonly derivative: string; readonly geo\_bounds: string; readonly geo\_centroid: string; 
readonly max: string; readonly median: string; readonly min: string; readonly moving\_avg: string; readonly percentile\_ranks: string; readonly serial\_diff: string; readonly std\_dev: string; readonly sum: string; readonly top\_hits: string; }; readonly runtimeFields: { readonly overview: string; readonly mapping: string; }; readonly scriptedFields: { readonly scriptFields: string; readonly scriptAggs: string; readonly painless: string; readonly painlessApi: string; readonly painlessLangSpec: string; readonly painlessSyntax: string; readonly painlessWalkthrough: string; readonly luceneExpressions: string; }; readonly search: { readonly sessions: string; readonly sessionLimits: string; }; readonly indexPatterns: { readonly introduction: string; readonly fieldFormattersNumber: string; readonly fieldFormattersString: string; readonly runtimeFields: string; }; readonly addData: string; readonly kibana: string; readonly upgradeAssistant: { readonly overview: string; readonly batchReindex: string; readonly remoteReindex: string; }; readonly rollupJobs: string; readonly elasticsearch: Record<string, string>; readonly siem: { readonly privileges: string; readonly guide: string; readonly gettingStarted: string; readonly ml: string; readonly ruleChangeLog: string; readonly detectionsReq: string; readonly networkMap: string; readonly troubleshootGaps: string; }; readonly securitySolution: { readonly trustedApps: string; }; readonly query: { readonly eql: string; readonly kueryQuerySyntax: string; readonly luceneQuerySyntax: string; readonly percolate: string; readonly queryDsl: string; }; readonly date: { readonly dateMath: string; readonly dateMathIndexNames: string; }; readonly management: Record<string, string>; readonly ml: Record<string, string>; readonly transforms: Record<string, string>; readonly visualize: Record<string, string>; readonly apis: Readonly<{ bulkIndexAlias: string; byteSizeUnits: string; createAutoFollowPattern: string; createFollower: string; 
createIndex: string; createSnapshotLifecyclePolicy: string; createRoleMapping: string; createRoleMappingTemplates: string; createRollupJobsRequest: string; createApiKey: string; createPipeline: string; createTransformRequest: string; cronExpressions: string; executeWatchActionModes: string; indexExists: string; openIndex: string; putComponentTemplate: string; painlessExecute: string; painlessExecuteAPIContexts: string; putComponentTemplateMetadata: string; putSnapshotLifecyclePolicy: string; putIndexTemplateV1: string; putWatch: string; simulatePipeline: string; timeUnits: string; updateTransform: string; }>; readonly observability: Readonly<{ guide: string; infrastructureThreshold: string; logsThreshold: string; metricsThreshold: string; monitorStatus: string; monitorUptime: string; tlsCertificate: string; uptimeDurationAnomaly: string; }>; readonly alerting: Record<string, string>; readonly maps: Readonly<{ guide: string; importGeospatialPrivileges: string; gdalTutorial: string; }>; readonly monitoring: Record<string, string>; readonly security: Readonly<{ apiKeyServiceSettings: string; clusterPrivileges: string; elasticsearchSettings: string; elasticsearchEnableSecurity: string; elasticsearchEnableApiKeys: string; indicesPrivileges: string; kibanaTLS: string; kibanaPrivileges: string; mappingRoles: string; mappingRolesFieldRules: string; runAsPrivilege: string; }>; readonly spaces: Readonly<{ kibanaLegacyUrlAliases: string; kibanaDisableLegacyUrlAliasesApi: string; }>; readonly watcher: Record<string, string>; readonly ccs: Record<string, string>; readonly plugins: Record<string, string>; readonly snapshotRestore: Record<string, string>; readonly ingest: Record<string, string>; readonly fleet: Readonly<{ beatsAgentComparison: string; guide: string; fleetServer: string; fleetServerAddFleetServer: string; settings: string; settingsFleetServerHostSettings: string; settingsFleetServerProxySettings: string; troubleshooting: string; elasticAgent: string; datastreams: 
string; datastreamsNamingScheme: string; installElasticAgent: string; installElasticAgentStandalone: string; upgradeElasticAgent: string; upgradeElasticAgent712lower: string; learnMoreBlog: string; apiKeysLearnMore: string; onPremRegistry: string; }>; readonly ecs: { readonly guide: string; }; readonly clients: { readonly guide: string; readonly goOverview: string; readonly javaIndex: string; readonly jsIntro: string; readonly netGuide: string; readonly perlGuide: string; readonly phpGuide: string; readonly pythonGuide: string; readonly rubyOverview: string; readonly rustGuide: string; }; readonly endpoints: { readonly troubleshooting: string; }; } | |
diff --git a/docs/osquery/osquery.asciidoc b/docs/osquery/osquery.asciidoc
index 396135d8d1751..500dc6959fc00 100644
--- a/docs/osquery/osquery.asciidoc
+++ b/docs/osquery/osquery.asciidoc
@@ -288,13 +288,21 @@ This is useful for teams who need in-depth and detailed control.
[float]
=== Customize Osquery configuration
-By default, all Osquery Manager integrations share the same osquery configuration. However, you can customize how Osquery is configured by editing the Osquery Manager integration for each agent policy
+experimental[] By default, all Osquery Manager integrations share the same osquery configuration. However, you can customize how Osquery is configured by editing the Osquery Manager integration for each agent policy
you want to adjust. The custom configuration is then applied to all agents in the policy.
This powerful feature allows you to configure
https://osquery.readthedocs.io/en/stable/deployment/file-integrity-monitoring[File Integrity Monitoring], https://osquery.readthedocs.io/en/stable/deployment/process-auditing[Process auditing],
and https://osquery.readthedocs.io/en/stable/deployment/configuration/#configuration-specification[others].
-IMPORTANT: Take caution when editing this configuration. The changes you make are distributed to all agents in the policy.
+[IMPORTANT]
+=========================
+
+* Take caution when editing this configuration. The changes you make are distributed to all agents in the policy.
+
+* Take caution when editing `packs` using the Advanced *Osquery config* field.
+Any changes you make to `packs` from this field are not reflected in the UI on the Osquery *Packs* page in {kib}, however, these changes are deployed to agents in the policy.
+While this allows you to use advanced Osquery functionality like pack discovery queries, you do lose the ability to manage packs defined this way from the Osquery *Packs* page.
+=========================
. From the {kib} main menu, click *Fleet*, then the *Agent policies* tab.
@@ -315,6 +323,16 @@ IMPORTANT: Take caution when editing this configuration. The changes you make ar
* (Optional) To load a full configuration file, drag and drop an Osquery `.conf` file into the area at the bottom of the page.
. Click *Save integration* to apply the custom configuration to all agents in the policy.
++
+As an example, the following configuration disables two tables.
++
+```ts
+{
+ "options":{
+ "disable_tables":"curl,process_envs"
+ }
+}
+```
[float]
=== Upgrade Osquery versions
diff --git a/docs/settings/apm-settings.asciidoc b/docs/settings/apm-settings.asciidoc
index 77a250a14f929..27ea7f4dc7cd0 100644
--- a/docs/settings/apm-settings.asciidoc
+++ b/docs/settings/apm-settings.asciidoc
@@ -101,8 +101,8 @@ Changing these settings may disable features of the APM App.
| `xpack.apm.indices.sourcemap` {ess-icon}
| Matcher for all source map indices. Defaults to `apm-*`.
-| `xpack.apm.autocreateApmIndexPattern` {ess-icon}
- | Set to `false` to disable the automatic creation of the APM index pattern when the APM app is opened. Defaults to `true`.
+| `xpack.apm.autoCreateApmDataView` {ess-icon}
+ | Set to `false` to disable the automatic creation of the APM data view when the APM app is opened. Defaults to `true`.
|===
-// end::general-apm-settings[]
\ No newline at end of file
+// end::general-apm-settings[]
diff --git a/docs/settings/fleet-settings.asciidoc b/docs/settings/fleet-settings.asciidoc
index f0dfeb619bb38..a088f31937cc8 100644
--- a/docs/settings/fleet-settings.asciidoc
+++ b/docs/settings/fleet-settings.asciidoc
@@ -87,6 +87,7 @@ Optional properties are:
`data_output_id`:: ID of the output to send data (Need to be identical to `monitoring_output_id`)
`monitoring_output_id`:: ID of the output to send monitoring data. (Need to be identical to `data_output_id`)
`package_policies`:: List of integration policies to add to this policy.
+ `id`::: Unique ID of the integration policy. The ID may be a number or string.
`name`::: (required) Name of the integration policy.
`package`::: (required) Integration that this policy configures
`name`:::: Name of the integration associated with this policy.
@@ -128,6 +129,7 @@ xpack.fleet.agentPolicies:
- package:
name: system
name: System Integration
+ id: preconfigured-system
inputs:
- type: system/metrics
enabled: true
diff --git a/docs/settings/monitoring-settings.asciidoc b/docs/settings/monitoring-settings.asciidoc
index d8bc26b7b3987..8bc98a028b8f6 100644
--- a/docs/settings/monitoring-settings.asciidoc
+++ b/docs/settings/monitoring-settings.asciidoc
@@ -72,6 +72,9 @@ For more information, see
| `monitoring.ui.elasticsearch.ssl`
| Shares the same configuration as <>. These settings configure encrypted communication between {kib} and the monitoring cluster.
+| `monitoring.cluster_alerts.allowedSpaces` {ess-icon}
+ | Specifies the spaces where cluster Stack Monitoring alerts can be created. You must specify all spaces where you want to generate alerts, including the default space. Defaults to `[ "default" ]`.
+
|===
[float]
diff --git a/docs/settings/spaces-settings.asciidoc b/docs/settings/spaces-settings.asciidoc
index dd37943101145..3eb91a0d884ef 100644
--- a/docs/settings/spaces-settings.asciidoc
+++ b/docs/settings/spaces-settings.asciidoc
@@ -12,11 +12,3 @@ The maximum number of spaces that you can use with the {kib} instance. Some {kib
return all spaces using a single `_search` from {es}, so you must
configure this setting lower than the `index.max_result_window` in {es}.
The default is `1000`.
-
-`monitoring.cluster_alerts.allowedSpaces` {ess-icon}::
-Specifies the spaces where cluster alerts are automatically generated.
-You must specify all spaces where you want to generate alerts, including the default space.
-When the default space is unspecified, {kib} is unable to generate an alert for the default space.
-{es} clusters that run on {es} services are all containers. To send monitoring data
-from your self-managed {es} installation to {es} services, set to `false`.
-The default is `true`.
diff --git a/docs/settings/url-drilldown-settings.asciidoc b/docs/settings/url-drilldown-settings.asciidoc
index 702829ec34dcc..36dbabbe7fe1e 100644
--- a/docs/settings/url-drilldown-settings.asciidoc
+++ b/docs/settings/url-drilldown-settings.asciidoc
@@ -6,16 +6,13 @@
Configure the URL drilldown settings in your `kibana.yml` configuration file.
-[cols="2*<"]
-|===
-| [[external-URL-policy]] `externalUrl.policy`
- | Configures the external URL policies. URL drilldowns respect the global *External URL* service, which you can use to deny or allow external URLs.
+[[external-URL-policy]] `externalUrl.policy`::
+Configures the external URL policies. URL drilldowns respect the global *External URL* service, which you can use to deny or allow external URLs.
By default all external URLs are allowed.
-|===
-
-For example, to allow external URLs only to the `example.com` domain with the `https` scheme, except for the `danger.example.com` sub-domain,
++
+For example, to allow only external URLs to the `example.com` domain with the `https` scheme, except for the `danger.example.com` sub-domain,
which is denied even when `https` scheme is used:
-
++
["source","yml"]
-----------
externalUrl.policy:
@@ -25,4 +22,3 @@ externalUrl.policy:
host: example.com
protocol: https
-----------
-
diff --git a/docs/setup/docker.asciidoc b/docs/setup/docker.asciidoc
index 3acaf2ddd2c12..0aa6c680a7761 100644
--- a/docs/setup/docker.asciidoc
+++ b/docs/setup/docker.asciidoc
@@ -14,10 +14,13 @@ https://github.com/elastic/dockerfiles/tree/{branch}/kibana[GitHub].
These images contain both free and subscription features.
<> to try out all of the features.
-[float]
+[discrete]
[[run-kibana-on-docker-for-dev]]
=== Run {kib} on Docker for development
+. Start an {es} container for development or testing:
++
+--
ifeval::["{release-state}"=="unreleased"]
NOTE: No Docker images are currently available for {kib} {version}.
@@ -26,14 +29,16 @@ endif::[]
ifeval::["{release-state}"!="unreleased"]
-. Start an {es} container for development or testing:
-+
[source,sh,subs="attributes"]
----
docker network create elastic
docker pull {es-docker-image}
docker run --name es-node01 --net elastic -p 9200:9200 -p 9300:9300 -t {es-docker-image}
----
+
+endif::[]
+
+--
+
When you start {es} for the first time, the following security configuration
occurs automatically:
@@ -51,30 +56,26 @@ and enrollment token.
. Copy the generated password and enrollment token and save them in a secure
location. These values are shown only when you start {es} for the first time.
You'll use these to enroll {kib} with your {es} cluster and log in.
+
+. In a new terminal session, start {kib} and connect it to your {es} container:
+
-[NOTE]
-====
-If you need to reset the password for the `elastic` user or other
-built-in users, run the {ref}/reset-password.html[`elasticsearch-reset-password`]
-tool. To generate new enrollment tokens for {kib} or {es} nodes, run the
-{ref}/create-enrollment-token.html[`elasticsearch-create-enrollment-token`] tool.
-These tools are available in the {es} `bin` directory of the Docker container.
+--
+ifeval::["{release-state}"=="unreleased"]
-For example:
+NOTE: No Docker images are currently available for {kib} {version}.
-[source,sh]
-----
-docker exec -it es-node01 /usr/share/elasticsearch/bin/elasticsearch-reset-password -u elastic
-----
-====
+endif::[]
+
+ifeval::["{release-state}"!="unreleased"]
-. In a new terminal session, start {kib} and connect it to your {es} container:
-+
[source,sh,subs="attributes"]
----
docker pull {docker-image}
docker run --name kib-01 --net elastic -p 5601:5601 {docker-image}
----
+
+endif::[]
+--
+
When you start {kib}, a unique link is output to your terminal.
@@ -86,7 +87,32 @@ When you start {kib}, a unique link is output to your terminal.
.. Log in to {kib} as the `elastic` user with the password that was generated
when you started {es}.
-[float]
+[[docker-generate]]
+[discrete]
+=== Generate passwords and enrollment tokens
+If you need to reset the password for the `elastic` user or other
+built-in users, run the {ref}/reset-password.html[`elasticsearch-reset-password`]
+tool. This tool is available in the {es} `bin` directory of the Docker container.
+
+For example, to reset the password for the `elastic` user:
+
+[source,sh]
+----
+docker exec -it es-node01 /usr/share/elasticsearch/bin/elasticsearch-reset-password -u elastic
+----
+
+If you need to generate new enrollment tokens for {kib} or {es} nodes, run the
+{ref}/create-enrollment-token.html[`elasticsearch-create-enrollment-token`] tool.
+This tool is available in the {es} `bin` directory of the Docker container.
+
+For example, to generate a new enrollment token for {kib}:
+
+[source,sh]
+----
+docker exec -it es-node01 /usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token -s kibana
+----
+
+[discrete]
=== Remove Docker containers
To remove the containers and their network, run:
@@ -98,8 +124,7 @@ docker rm es-node01
docker rm kib-01
----
-endif::[]
-[float]
+[discrete]
[[configuring-kibana-docker]]
=== Configure Kibana on Docker
@@ -108,7 +133,7 @@ conventional approach is to provide a `kibana.yml` file as described in
{kibana-ref}/settings.html[Configuring Kibana], but it's also possible to use
environment variables to define settings.
-[float]
+[discrete]
[[bind-mount-config]]
==== Bind-mounted configuration
@@ -127,7 +152,7 @@ services:
==== Persist the {kib} keystore
-By default, {kib] auto-generates a keystore file for secure settings at startup. To persist your {kibana-ref}/secure-settings.html[secure settings], use the `kibana-keystore` utility to bind-mount the parent directory of the keystore to the container. For example:
+By default, {kib} auto-generates a keystore file for secure settings at startup. To persist your {kibana-ref}/secure-settings.html[secure settings], use the `kibana-keystore` utility to bind-mount the parent directory of the keystore to the container. For example:
["source","sh",subs="attributes"]
----
@@ -135,7 +160,7 @@ docker run -it --rm -v full_path_to/config:/usr/share/kibana/config -v full_path
docker run -it --rm -v full_path_to/config:/usr/share/kibana/config -v full_path_to/data:/usr/share/kibana/data {docker-image} bin/kibana-keystore add test_keystore_setting
----
-[float]
+[discrete]
[[environment-variable-config]]
==== Environment variable configuration
@@ -179,7 +204,7 @@ services:
Since environment variables are translated to CLI arguments, they take
precedence over settings configured in `kibana.yml`.
-[float]
+[discrete]
[[docker-defaults]]
==== Docker defaults
The following settings have different default values when using the Docker
diff --git a/docs/setup/install/deb.asciidoc b/docs/setup/install/deb.asciidoc
index 3f600d7c2bdbc..8e8c43ff8a15d 100644
--- a/docs/setup/install/deb.asciidoc
+++ b/docs/setup/install/deb.asciidoc
@@ -188,9 +188,9 @@ locations for a Debian-based system:
| path.data
| logs
- | Logs files location
- | /var/log/kibana
- | path.logs
+ | Logs files location
+ | /var/log/kibana
+ | path.logs
| plugins
| Plugin files location. Each plugin will be contained in a subdirectory.
diff --git a/docs/setup/install/rpm.asciidoc b/docs/setup/install/rpm.asciidoc
index 329af9af0ccf7..0ef714c73b9ba 100644
--- a/docs/setup/install/rpm.asciidoc
+++ b/docs/setup/install/rpm.asciidoc
@@ -174,7 +174,6 @@ locations for an RPM-based system:
| Configuration files including `kibana.yml`
| /etc/kibana
| <>
- d|
| data
| The location of the data files written to disk by Kibana and its plugins
@@ -182,9 +181,9 @@ locations for an RPM-based system:
| path.data
| logs
- | Logs files location
- | /var/log/kibana
- | path.logs
+ | Logs files location
+ | /var/log/kibana
+ | path.logs
| plugins
| Plugin files location. Each plugin will be contained in a subdirectory.
diff --git a/docs/setup/install/targz.asciidoc b/docs/setup/install/targz.asciidoc
index d9849811a7455..1d8c61a6e9a07 100644
--- a/docs/setup/install/targz.asciidoc
+++ b/docs/setup/install/targz.asciidoc
@@ -125,7 +125,7 @@ important data later on.
| home
| Kibana home directory or `$KIBANA_HOME`
d| Directory created by unpacking the archive
- d|
+ |
| bin
| Binary scripts including `kibana` to start the Kibana server
@@ -137,7 +137,6 @@ important data later on.
| Configuration files including `kibana.yml`
| $KIBANA_HOME\config
| <>
- d|
| data
| The location of the data files written to disk by Kibana and its plugins
diff --git a/docs/setup/upgrade.asciidoc b/docs/setup/upgrade.asciidoc
index a139b8a50ca4d..c828b837d8efd 100644
--- a/docs/setup/upgrade.asciidoc
+++ b/docs/setup/upgrade.asciidoc
@@ -44,13 +44,20 @@ a|
[[upgrade-before-you-begin]]
=== Before you begin
-WARNING: {kib} automatically runs upgrade migrations when required. To roll back to an earlier version in case of an upgrade failure, you **must** have a {ref}/snapshot-restore.html[backup snapshot] available. This snapshot must include the `kibana` feature state or all `kibana*` indices. For more information see <>.
+[WARNING]
+====
+{kib} automatically runs upgrade migrations when required. To roll back to an
+earlier version in case of an upgrade failure, you **must** have a
+{ref}/snapshot-restore.html[backup snapshot] that includes the `kibana` feature
+state. Snapshots include this feature state by default.
+
+For more information, refer to <>.
+====
Before you upgrade {kib}:
* Consult the <>.
-* {ref}/snapshots-take-snapshot.html[Take a snapshot] of your data. To roll back to an earlier version, the snapshot must include the `kibana` feature state or all `.kibana*` indices.
-* Although not a requirement for rollbacks, we recommend taking a snapshot of all {kib} indices created by the plugins you use such as the `.reporting*` indices created by the reporting plugin.
+* {ref}/snapshots-take-snapshot.html[Take a snapshot] of your data. To roll back to an earlier version, the snapshot must include the `kibana` feature state.
* Before you upgrade production servers, test the upgrades in a dev environment.
* See <> for common reasons upgrades fail and how to prevent these.
* If you are using custom plugins, check that a compatible version is
diff --git a/docs/setup/upgrade/upgrade-migrations.asciidoc b/docs/setup/upgrade/upgrade-migrations.asciidoc
index c47c2c1745e94..e9e1b757fd71d 100644
--- a/docs/setup/upgrade/upgrade-migrations.asciidoc
+++ b/docs/setup/upgrade/upgrade-migrations.asciidoc
@@ -151,17 +151,18 @@ In order to rollback after a failed upgrade migration, the saved object indices
[float]
===== Rollback by restoring a backup snapshot:
-1. Before proceeding, {ref}/snapshots-take-snapshot.html[take a snapshot] that contains the `kibana` feature state or all `.kibana*` indices.
+1. Before proceeding, {ref}/snapshots-take-snapshot.html[take a snapshot] that contains the `kibana` feature state.
+ Snapshots include this feature state by default.
2. Shutdown all {kib} instances to be 100% sure that there are no instances currently performing a migration.
3. Delete all saved object indices with `DELETE /.kibana*`
-4. {ref}/snapshots-restore-snapshot.html[Restore] the `kibana` feature state or all `.kibana* indices and their aliases from the snapshot.
+4. {ref}/snapshots-restore-snapshot.html[Restore] the `kibana` feature state from the snapshot.
5. Start up all {kib} instances on the older version you wish to rollback to.
[float]
===== (Not recommended) Rollback without a backup snapshot:
1. Shutdown all {kib} instances to be 100% sure that there are no {kib} instances currently performing a migration.
-2. {ref}/snapshots-take-snapshot.html[Take a snapshot] that includes the `kibana` feature state or all `.kibana*` indices.
+2. {ref}/snapshots-take-snapshot.html[Take a snapshot] that includes the `kibana` feature state. Snapshots include this feature state by default.
3. Delete the version specific indices created by the failed upgrade migration. E.g. if you wish to rollback from a failed upgrade to v7.12.0 `DELETE /.kibana_7.12.0_*,.kibana_task_manager_7.12.0_*`
4. Inspect the output of `GET /_cat/aliases`. If either the `.kibana` and/or `.kibana_task_manager` alias is missing, these will have to be created manually. Find the latest index from the output of `GET /_cat/indices` and create the missing alias to point to the latest index. E.g. if the `.kibana` alias was missing and the latest index is `.kibana_3` create a new alias with `POST /.kibana_3/_aliases/.kibana`.
5. Remove the write block from the rollback indices. `PUT /.kibana,.kibana_task_manager/_settings {"index.blocks.write": false}`
diff --git a/docs/user/index.asciidoc b/docs/user/index.asciidoc
index 75d0da1c597b6..57668b3f5bccf 100644
--- a/docs/user/index.asciidoc
+++ b/docs/user/index.asciidoc
@@ -45,3 +45,5 @@ include::management.asciidoc[]
include::api.asciidoc[]
include::plugins.asciidoc[]
+
+include::troubleshooting.asciidoc[]
diff --git a/docs/user/troubleshooting.asciidoc b/docs/user/troubleshooting.asciidoc
new file mode 100644
index 0000000000000..8b32471c98d86
--- /dev/null
+++ b/docs/user/troubleshooting.asciidoc
@@ -0,0 +1,70 @@
+[[kibana-troubleshooting]]
+== Troubleshooting
+
+=== Using {kib} server logs
+{kib} Logs is a great way to see what's going on in your application and to debug performance issues. Navigating through a large number of generated logs can be overwhelming, and the following are some techniques that you can use to optimize the process.
+
+Start by defining a problem area that you are interested in. For example, you might be interested in seeing how a particular {kib} Plugin is performing, so no need to gather logs for all of {kib}. Or you might want to focus on a particular feature, such as requests from the {kib} server to the {es} server.
+Depending on your needs, you can configure {kib} to generate logs for a specific feature.
+[source,yml]
+----
+logging:
+ appenders:
+ file:
+ type: file
+ fileName: ./kibana.log
+ layout:
+ type: json
+
+### gather all the Kibana logs into a file
+logging.root:
+ appenders: [file]
+ level: all
+
+### or gather a subset of the logs
+logging.loggers:
+ ### responses to an HTTP request
+ - name: http.server.response
+ level: debug
+ appenders: [file]
+ ### result of a query to the Elasticsearch server
+ - name: elasticsearch.query
+ level: debug
+ appenders: [file]
+ ### logs generated by my plugin
+ - name: plugins.myPlugin
+ level: debug
+ appenders: [file]
+----
+WARNING: Kibana's `file` appender is configured to produce logs in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format. It's the only format that includes the meta information necessary for https://www.elastic.co/guide/en/apm/agent/nodejs/current/log-correlation.html[log correlation] out-of-the-box.
+
+The next step is to define what https://www.elastic.co/observability[observability tools] are available.
+For a better experience, set up an https://www.elastic.co/guide/en/apm/get-started/current/observability-integrations.html[Observability integration] provided by Elastic to debug your application with the <>.
+To debug something quickly without setting up additional tooling, you can work with <>.
+
+[[debugging-logs-apm-ui]]
+==== APM UI
+*Prerequisites:* {kib} logs are configured to be in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format to include tracing identifiers.
+
+To debug {kib} with the APM UI, you must set up the APM infrastructure. You can find instructions for the setup process
+https://www.elastic.co/guide/en/apm/get-started/current/observability-integrations.html[on the Observability integrations page].
+
+Once you set up the APM infrastructure, you can enable the APM agent and put {kib} under load to collect APM events. To analyze the collected metrics and logs, use the APM UI as demonstrated https://www.elastic.co/guide/en/kibana/master/transactions.html#transaction-trace-sample[in the docs].
+
+[[plain-kibana-logs]]
+==== Plain {kib} logs
+*Prerequisites:* {kib} logs are configured to be in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format to include tracing identifiers.
+
+Open {kib} Logs and search for an operation you are interested in.
+For example, suppose you want to investigate the response times for queries to the `/api/telemetry/v2/clusters/_stats` {kib} endpoint.
+Open {kib} Logs and search for the HTTP server response for the endpoint. It looks similar to the following (some fields are omitted for brevity).
+[source,json]
+----
+{
+ "message":"POST /api/telemetry/v2/clusters/_stats 200 1014ms - 43.2KB",
+ "log":{"level":"DEBUG","logger":"http.server.response"},
+ "trace":{"id":"9b99131a6f66587971ef085ef97dfd07"},
+ "transaction":{"id":"d0c5bbf14f5febca"}
+}
+----
+You are interested in the https://www.elastic.co/guide/en/ecs/current/ecs-tracing.html#field-trace-id[trace.id] field, which is a unique identifier of a trace. The `trace.id` provides a way to group multiple events, like transactions, which belong together. You can search for `"trace":{"id":"9b99131a6f66587971ef085ef97dfd07"}` to get all the logs that belong to the same trace. This enables you to see how many {es} requests were triggered during the `9b99131a6f66587971ef085ef97dfd07` trace, what they looked like, what {es} endpoints were hit, and so on.
diff --git a/package.json b/package.json
index 374ccee71ec6a..a4f8ae69eda39 100644
--- a/package.json
+++ b/package.json
@@ -103,13 +103,12 @@
"@elastic/apm-rum": "^5.9.1",
"@elastic/apm-rum-react": "^1.3.1",
"@elastic/apm-synthtrace": "link:bazel-bin/packages/elastic-apm-synthtrace",
- "@elastic/charts": "40.0.0",
+ "@elastic/charts": "40.1.0",
"@elastic/datemath": "link:bazel-bin/packages/elastic-datemath",
"@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^8.0.0-canary.35",
"@elastic/ems-client": "8.0.0",
- "@elastic/eui": "41.0.0",
+ "@elastic/eui": "41.2.3",
"@elastic/filesaver": "1.1.2",
- "@elastic/maki": "6.3.0",
"@elastic/node-crypto": "1.2.1",
"@elastic/numeral": "^2.5.1",
"@elastic/react-search-ui": "^1.6.0",
@@ -196,8 +195,10 @@
"archiver": "^5.2.0",
"axios": "^0.21.1",
"base64-js": "^1.3.1",
+ "bitmap-sdf": "^1.0.3",
"brace": "0.11.1",
- "broadcast-channel": "^4.5.0",
+ "broadcast-channel": "^4.7.0",
+ "canvg": "^3.0.9",
"chalk": "^4.1.0",
"cheerio": "^1.0.0-rc.10",
"chokidar": "^3.4.3",
@@ -368,7 +369,7 @@
"redux-thunks": "^1.0.0",
"regenerator-runtime": "^0.13.3",
"remark-parse": "^8.0.3",
- "remark-stringify": "^9.0.0",
+ "remark-stringify": "^8.0.3",
"require-in-the-middle": "^5.1.0",
"reselect": "^4.0.0",
"resize-observer-polyfill": "^1.5.1",
@@ -520,7 +521,6 @@
"@types/ejs": "^3.0.6",
"@types/elastic__apm-synthtrace": "link:bazel-bin/packages/elastic-apm-synthtrace/npm_module_types",
"@types/elastic__datemath": "link:bazel-bin/packages/elastic-datemath/npm_module_types",
- "@types/elasticsearch": "^5.0.33",
"@types/enzyme": "^3.10.8",
"@types/eslint": "^7.28.0",
"@types/express": "^4.17.13",
@@ -531,7 +531,6 @@
"@types/file-saver": "^2.0.0",
"@types/flot": "^0.0.31",
"@types/geojson": "7946.0.7",
- "@types/getopts": "^2.0.1",
"@types/getos": "^3.0.0",
"@types/glob": "^7.1.2",
"@types/gulp": "^4.0.6",
@@ -568,6 +567,10 @@
"@types/kbn__config": "link:bazel-bin/packages/kbn-config/npm_module_types",
"@types/kbn__config-schema": "link:bazel-bin/packages/kbn-config-schema/npm_module_types",
"@types/kbn__crypto": "link:bazel-bin/packages/kbn-crypto/npm_module_types",
+ "@types/kbn__dev-utils": "link:bazel-bin/packages/kbn-dev-utils/npm_module_types",
+ "@types/kbn__docs-utils": "link:bazel-bin/packages/kbn-docs-utils/npm_module_types",
+ "@types/kbn__es-archiver": "link:bazel-bin/packages/kbn-es-archiver/npm_module_types",
+ "@types/kbn__es-query": "link:bazel-bin/packages/kbn-es-query/npm_module_types",
"@types/kbn__i18n": "link:bazel-bin/packages/kbn-i18n/npm_module_types",
"@types/kbn__i18n-react": "link:bazel-bin/packages/kbn-i18n-react/npm_module_types",
"@types/license-checker": "15.0.0",
@@ -677,7 +680,7 @@
"babel-plugin-add-module-exports": "^1.0.4",
"babel-plugin-istanbul": "^6.1.1",
"babel-plugin-require-context-hook": "^1.0.0",
- "babel-plugin-styled-components": "^1.13.3",
+ "babel-plugin-styled-components": "^2.0.2",
"babel-plugin-transform-react-remove-prop-types": "^0.4.24",
"backport": "^5.6.6",
"callsites": "^3.1.0",
diff --git a/packages/BUILD.bazel b/packages/BUILD.bazel
index 96b1846147689..5fdaa9931bc4d 100644
--- a/packages/BUILD.bazel
+++ b/packages/BUILD.bazel
@@ -86,6 +86,10 @@ filegroup(
"//packages/kbn-config:build_types",
"//packages/kbn-config-schema:build_types",
"//packages/kbn-crypto:build_types",
+ "//packages/kbn-dev-utils:build_types",
+ "//packages/kbn-docs-utils:build_types",
+ "//packages/kbn-es-archiver:build_types",
+ "//packages/kbn-es-query:build_types",
"//packages/kbn-i18n:build_types",
"//packages/kbn-i18n-react:build_types",
],
diff --git a/packages/elastic-eslint-config-kibana/react.js b/packages/elastic-eslint-config-kibana/react.js
index 29000bdb15684..0b1cce15de9ad 100644
--- a/packages/elastic-eslint-config-kibana/react.js
+++ b/packages/elastic-eslint-config-kibana/react.js
@@ -1,5 +1,5 @@
const semver = require('semver');
-const { kibanaPackageJson: PKG } = require('@kbn/dev-utils');
+const { kibanaPackageJson: PKG } = require('@kbn/utils');
module.exports = {
plugins: [
diff --git a/packages/elastic-eslint-config-kibana/typescript.js b/packages/elastic-eslint-config-kibana/typescript.js
index 1a0ef81ae2f1e..3ada725cb1805 100644
--- a/packages/elastic-eslint-config-kibana/typescript.js
+++ b/packages/elastic-eslint-config-kibana/typescript.js
@@ -4,7 +4,7 @@
// as this package was moved from typescript-eslint-parser to @typescript-eslint/parser
const semver = require('semver');
-const { kibanaPackageJson: PKG } = require('@kbn/dev-utils');
+const { kibanaPackageJson: PKG } = require('@kbn/utils');
const eslintConfigPrettierTypescriptEslintRules = require('eslint-config-prettier/@typescript-eslint').rules;
diff --git a/packages/kbn-apm-config-loader/src/init_apm.test.ts b/packages/kbn-apm-config-loader/src/init_apm.test.ts
index 95f0a15a448c8..cabab421519bd 100644
--- a/packages/kbn-apm-config-loader/src/init_apm.test.ts
+++ b/packages/kbn-apm-config-loader/src/init_apm.test.ts
@@ -12,13 +12,13 @@ import { initApm } from './init_apm';
import apm from 'elastic-apm-node';
describe('initApm', () => {
- let apmAddFilterSpy: jest.SpyInstance;
- let apmStartSpy: jest.SpyInstance;
+ let apmAddFilterMock: jest.Mock;
+ let apmStartMock: jest.Mock;
let getConfig: jest.Mock;
beforeEach(() => {
- apmAddFilterSpy = jest.spyOn(apm, 'addFilter').mockImplementation(() => undefined);
- apmStartSpy = jest.spyOn(apm, 'start').mockImplementation(() => undefined as any);
+ apmAddFilterMock = apm.addFilter as jest.Mock;
+ apmStartMock = apm.start as jest.Mock;
getConfig = jest.fn();
mockLoadConfiguration.mockImplementation(() => ({
@@ -27,7 +27,8 @@ describe('initApm', () => {
});
afterEach(() => {
- jest.restoreAllMocks();
+ apmAddFilterMock.mockReset();
+ apmStartMock.mockReset();
mockLoadConfiguration.mockReset();
});
@@ -48,8 +49,8 @@ describe('initApm', () => {
it('registers a filter using `addFilter`', () => {
initApm(['foo', 'bar'], 'rootDir', true, 'service-name');
- expect(apmAddFilterSpy).toHaveBeenCalledTimes(1);
- expect(apmAddFilterSpy).toHaveBeenCalledWith(expect.any(Function));
+ expect(apmAddFilterMock).toHaveBeenCalledTimes(1);
+ expect(apmAddFilterMock).toHaveBeenCalledWith(expect.any(Function));
});
it('starts apm with the config returned from `getConfig`', () => {
@@ -60,7 +61,7 @@ describe('initApm', () => {
initApm(['foo', 'bar'], 'rootDir', true, 'service-name');
- expect(apmStartSpy).toHaveBeenCalledTimes(1);
- expect(apmStartSpy).toHaveBeenCalledWith(config);
+ expect(apmStartMock).toHaveBeenCalledTimes(1);
+ expect(apmStartMock).toHaveBeenCalledWith(config);
});
});
diff --git a/packages/kbn-cli-dev-mode/BUILD.bazel b/packages/kbn-cli-dev-mode/BUILD.bazel
index 66e00706e9e58..cdc40e85c972a 100644
--- a/packages/kbn-cli-dev-mode/BUILD.bazel
+++ b/packages/kbn-cli-dev-mode/BUILD.bazel
@@ -50,7 +50,7 @@ RUNTIME_DEPS = [
TYPES_DEPS = [
"//packages/kbn-config:npm_module_types",
"//packages/kbn-config-schema:npm_module_types",
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-logging",
"//packages/kbn-optimizer",
"//packages/kbn-server-http-tools",
@@ -60,12 +60,12 @@ TYPES_DEPS = [
"@npm//chokidar",
"@npm//elastic-apm-node",
"@npm//execa",
+ "@npm//getopts",
"@npm//moment",
"@npm//rxjs",
"@npm//supertest",
"@npm//@types/hapi__h2o2",
"@npm//@types/hapi__hapi",
- "@npm//@types/getopts",
"@npm//@types/jest",
"@npm//@types/lodash",
"@npm//@types/node",
diff --git a/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts b/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
index e5e009e51e69e..0066644d0825a 100644
--- a/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
+++ b/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
@@ -8,11 +8,9 @@
import Path from 'path';
import * as Rx from 'rxjs';
-import {
- REPO_ROOT,
- createAbsolutePathSerializer,
- createAnyInstanceSerializer,
-} from '@kbn/dev-utils';
+import { createAbsolutePathSerializer, createAnyInstanceSerializer } from '@kbn/dev-utils';
+
+import { REPO_ROOT } from '@kbn/utils';
import { TestLog } from './log';
import { CliDevMode, SomeCliArgs } from './cli_dev_mode';
diff --git a/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts b/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
index 2396b316aa3a2..9cf688b675e67 100644
--- a/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
+++ b/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
@@ -22,7 +22,8 @@ import {
takeUntil,
} from 'rxjs/operators';
import { CliArgs } from '@kbn/config';
-import { REPO_ROOT, CiStatsReporter } from '@kbn/dev-utils';
+import { CiStatsReporter } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Log, CliLog } from './log';
import { Optimizer } from './optimizer';
diff --git a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
index 9fa13b013f195..25bc59bf78458 100644
--- a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
+++ b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
@@ -8,7 +8,8 @@
import Path from 'path';
-import { REPO_ROOT, createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { getServerWatchPaths } from './get_server_watch_paths';
@@ -65,7 +66,7 @@ it('produces the right watch and ignore list', () => {
/x-pack/test/plugin_functional/plugins/resolver_test/target/**,
/x-pack/test/plugin_functional/plugins/resolver_test/scripts/**,
/x-pack/test/plugin_functional/plugins/resolver_test/docs/**,
- /x-pack/plugins/reporting/chromium,
+ /x-pack/plugins/screenshotting/chromium,
/x-pack/plugins/security_solution/cypress,
/x-pack/plugins/apm/scripts,
/x-pack/plugins/apm/ftr_e2e,
diff --git a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.ts b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.ts
index e1bd431d280a4..acfc9aeecdc80 100644
--- a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.ts
+++ b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.ts
@@ -9,7 +9,7 @@
import Path from 'path';
import Fs from 'fs';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
interface Options {
pluginPaths: string[];
@@ -56,7 +56,7 @@ export function getServerWatchPaths({ pluginPaths, pluginScanDirs }: Options) {
/\.(md|sh|txt)$/,
/debug\.log$/,
...pluginInternalDirsIgnore,
- fromRoot('x-pack/plugins/reporting/chromium'),
+ fromRoot('x-pack/plugins/screenshotting/chromium'),
fromRoot('x-pack/plugins/security_solution/cypress'),
fromRoot('x-pack/plugins/apm/scripts'),
fromRoot('x-pack/plugins/apm/ftr_e2e'), // prevents restarts for APM cypress tests
diff --git a/packages/kbn-config-schema/src/byte_size_value/index.test.ts b/packages/kbn-config-schema/src/byte_size_value/index.test.ts
index a5d0142853416..7a2e3a5d6cb0f 100644
--- a/packages/kbn-config-schema/src/byte_size_value/index.test.ts
+++ b/packages/kbn-config-schema/src/byte_size_value/index.test.ts
@@ -30,6 +30,11 @@ describe('parsing units', () => {
expect(ByteSizeValue.parse('1gb').getValueInBytes()).toBe(1073741824);
});
+ test('case insensitive units', () => {
+ expect(ByteSizeValue.parse('1KB').getValueInBytes()).toBe(1024);
+ expect(ByteSizeValue.parse('1Mb').getValueInBytes()).toBe(1024 * 1024);
+ });
+
test('throws an error when unsupported unit specified', () => {
expect(() => ByteSizeValue.parse('1tb')).toThrowErrorMatchingInlineSnapshot(
`"Failed to parse value as byte value. Value must be either number of bytes, or follow the format [b|kb|mb|gb] (e.g., '1024kb', '200mb', '1gb'), where the number is a safe positive integer."`
diff --git a/packages/kbn-config-schema/src/byte_size_value/index.ts b/packages/kbn-config-schema/src/byte_size_value/index.ts
index fb90bd70ed5c6..6fabe35b30024 100644
--- a/packages/kbn-config-schema/src/byte_size_value/index.ts
+++ b/packages/kbn-config-schema/src/byte_size_value/index.ts
@@ -22,7 +22,7 @@ function renderUnit(value: number, unit: string) {
export class ByteSizeValue {
public static parse(text: string): ByteSizeValue {
- const match = /([1-9][0-9]*)(b|kb|mb|gb)/.exec(text);
+ const match = /([1-9][0-9]*)(b|kb|mb|gb)/i.exec(text);
if (!match) {
const number = Number(text);
if (typeof number !== 'number' || isNaN(number)) {
@@ -35,7 +35,7 @@ export class ByteSizeValue {
}
const value = parseInt(match[1], 10);
- const unit = match[2];
+ const unit = match[2].toLowerCase();
return new ByteSizeValue(value * unitMultiplier[unit]);
}
diff --git a/packages/kbn-crypto/BUILD.bazel b/packages/kbn-crypto/BUILD.bazel
index 81ee6d770103c..f71c8b866fd5d 100644
--- a/packages/kbn-crypto/BUILD.bazel
+++ b/packages/kbn-crypto/BUILD.bazel
@@ -34,7 +34,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"@npm//@types/flot",
"@npm//@types/jest",
"@npm//@types/node",
diff --git a/packages/kbn-dev-utils/BUILD.bazel b/packages/kbn-dev-utils/BUILD.bazel
index 4fd99e0144cb6..89df1870a3cec 100644
--- a/packages/kbn-dev-utils/BUILD.bazel
+++ b/packages/kbn-dev-utils/BUILD.bazel
@@ -1,9 +1,10 @@
-load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
-load("//src/dev/bazel:index.bzl", "jsts_transpiler")
+load("@npm//@bazel/typescript:index.bzl", "ts_config")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
PKG_BASE_NAME = "kbn-dev-utils"
PKG_REQUIRE_NAME = "@kbn/dev-utils"
+TYPES_PKG_REQUIRE_NAME = "@types/kbn__dev-utils"
SOURCE_FILES = glob(
[
@@ -43,7 +44,6 @@ NPM_MODULE_EXTRA_FILES = [
]
RUNTIME_DEPS = [
- "//packages/kbn-std",
"//packages/kbn-utils",
"@npm//@babel/core",
"@npm//axios",
@@ -66,7 +66,6 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-std",
"//packages/kbn-utils",
"@npm//@babel/parser",
"@npm//@babel/types",
@@ -124,7 +123,7 @@ ts_project(
js_library(
name = PKG_BASE_NAME,
srcs = NPM_MODULE_EXTRA_FILES,
- deps = RUNTIME_DEPS + [":target_node", ":tsc_types"],
+ deps = RUNTIME_DEPS + [":target_node"],
package_name = PKG_REQUIRE_NAME,
visibility = ["//visibility:public"],
)
@@ -143,3 +142,20 @@ filegroup(
],
visibility = ["//visibility:public"],
)
+
+pkg_npm_types(
+ name = "npm_module_types",
+ srcs = SRCS,
+ deps = [":tsc_types"],
+ package_name = TYPES_PKG_REQUIRE_NAME,
+ tsconfig = ":tsconfig",
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "build_types",
+ srcs = [
+ ":npm_module_types",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/packages/kbn-dev-utils/package.json b/packages/kbn-dev-utils/package.json
index 9d6e6dde86fac..ab4f489e7d345 100644
--- a/packages/kbn-dev-utils/package.json
+++ b/packages/kbn-dev-utils/package.json
@@ -4,7 +4,6 @@
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target_node/index.js",
- "types": "./target_types/index.d.ts",
"kibana": {
"devOnly": true
}
diff --git a/packages/kbn-dev-utils/src/index.ts b/packages/kbn-dev-utils/src/index.ts
index 381e99ac677f5..9b207ad9e9966 100644
--- a/packages/kbn-dev-utils/src/index.ts
+++ b/packages/kbn-dev-utils/src/index.ts
@@ -6,7 +6,6 @@
* Side Public License, v 1.
*/
-export * from '@kbn/utils';
export { withProcRunner, ProcRunner } from './proc_runner';
export * from './tooling_log';
export * from './serializers';
diff --git a/packages/kbn-dev-utils/src/tooling_log/__snapshots__/tooling_log_text_writer.test.ts.snap b/packages/kbn-dev-utils/src/tooling_log/__snapshots__/tooling_log_text_writer.test.ts.snap
index 7ff982acafbe4..5fa074d4c7739 100644
--- a/packages/kbn-dev-utils/src/tooling_log/__snapshots__/tooling_log_text_writer.test.ts.snap
+++ b/packages/kbn-dev-utils/src/tooling_log/__snapshots__/tooling_log_text_writer.test.ts.snap
@@ -170,6 +170,14 @@ exports[`level:warning/type:warning snapshots: output 1`] = `
"
`;
+exports[`never ignores write messages from the kibana elasticsearch.deprecation logger context 1`] = `
+" │[elasticsearch.deprecation]
+ │{ foo: { bar: { '1': [Array] } }, bar: { bar: { '1': [Array] } } }
+ │
+ │Infinity
+"
+`;
+
exports[`throws error if created with invalid level 1`] = `"Invalid log level \\"foo\\" (expected one of silent,error,warning,success,info,debug,verbose)"`;
exports[`throws error if writeTo config is not defined or doesn't have a write method 1`] = `"ToolingLogTextWriter requires the \`writeTo\` option be set to a stream (like process.stdout)"`;
diff --git a/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.test.ts b/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.test.ts
index b4668f29b6e21..fbccfdcdf6ac0 100644
--- a/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.test.ts
+++ b/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.test.ts
@@ -88,3 +88,55 @@ it('formats %s patterns and indents multi-line messages correctly', () => {
const output = write.mock.calls.reduce((acc, chunk) => `${acc}${chunk}`, '');
expect(output).toMatchSnapshot();
});
+
+it('does not write messages from sources in ignoreSources', () => {
+ const write = jest.fn();
+ const writer = new ToolingLogTextWriter({
+ ignoreSources: ['myIgnoredSource'],
+ level: 'debug',
+ writeTo: {
+ write,
+ },
+ });
+
+ writer.write({
+ source: 'myIgnoredSource',
+ type: 'success',
+ indent: 10,
+ args: [
+ '%s\n%O\n\n%d',
+ 'foo bar',
+ { foo: { bar: { 1: [1, 2, 3] } }, bar: { bar: { 1: [1, 2, 3] } } },
+ Infinity,
+ ],
+ });
+
+ const output = write.mock.calls.reduce((acc, chunk) => `${acc}${chunk}`, '');
+ expect(output).toEqual('');
+});
+
+it('never ignores write messages from the kibana elasticsearch.deprecation logger context', () => {
+ const write = jest.fn();
+ const writer = new ToolingLogTextWriter({
+ ignoreSources: ['myIgnoredSource'],
+ level: 'debug',
+ writeTo: {
+ write,
+ },
+ });
+
+ writer.write({
+ source: 'myIgnoredSource',
+ type: 'write',
+ indent: 10,
+ args: [
+ '%s\n%O\n\n%d',
+ '[elasticsearch.deprecation]',
+ { foo: { bar: { 1: [1, 2, 3] } }, bar: { bar: { 1: [1, 2, 3] } } },
+ Infinity,
+ ],
+ });
+
+ const output = write.mock.calls.reduce((acc, chunk) => `${acc}${chunk}`, '');
+ expect(output).toMatchSnapshot();
+});
diff --git a/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.ts b/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.ts
index 660dae3fa1f55..4fe33241cf77e 100644
--- a/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.ts
+++ b/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.ts
@@ -92,7 +92,15 @@ export class ToolingLogTextWriter implements Writer {
}
if (this.ignoreSources && msg.source && this.ignoreSources.includes(msg.source)) {
- return false;
+ if (msg.type === 'write') {
+ const txt = format(msg.args[0], ...msg.args.slice(1));
+ // Ensure that Elasticsearch deprecation log messages from Kibana aren't ignored
+ if (!/elasticsearch\.deprecation/.test(txt)) {
+ return false;
+ }
+ } else {
+ return false;
+ }
}
const prefix = has(MSG_PREFIXES, msg.type) ? MSG_PREFIXES[msg.type] : '';
diff --git a/packages/kbn-docs-utils/BUILD.bazel b/packages/kbn-docs-utils/BUILD.bazel
index 6bb37b3500152..edfd3ee96c181 100644
--- a/packages/kbn-docs-utils/BUILD.bazel
+++ b/packages/kbn-docs-utils/BUILD.bazel
@@ -1,9 +1,10 @@
-load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
-load("//src/dev/bazel:index.bzl", "jsts_transpiler")
+load("@npm//@bazel/typescript:index.bzl", "ts_config")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
PKG_BASE_NAME = "kbn-docs-utils"
PKG_REQUIRE_NAME = "@kbn/docs-utils"
+TYPES_PKG_REQUIRE_NAME = "@types/kbn__docs-utils"
SOURCE_FILES = glob(
[
@@ -37,7 +38,7 @@ RUNTIME_DEPS = [
TYPES_DEPS = [
"//packages/kbn-config:npm_module_types",
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-utils",
"@npm//ts-morph",
"@npm//@types/dedent",
@@ -77,7 +78,7 @@ ts_project(
js_library(
name = PKG_BASE_NAME,
srcs = NPM_MODULE_EXTRA_FILES,
- deps = RUNTIME_DEPS + [":target_node", ":tsc_types"],
+ deps = RUNTIME_DEPS + [":target_node"],
package_name = PKG_REQUIRE_NAME,
visibility = ["//visibility:public"],
)
@@ -96,3 +97,20 @@ filegroup(
],
visibility = ["//visibility:public"],
)
+
+pkg_npm_types(
+ name = "npm_module_types",
+ srcs = SRCS,
+ deps = [":tsc_types"],
+ package_name = TYPES_PKG_REQUIRE_NAME,
+ tsconfig = ":tsconfig",
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "build_types",
+ srcs = [
+ ":npm_module_types",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/packages/kbn-docs-utils/package.json b/packages/kbn-docs-utils/package.json
index dcff832583f59..84fc3ccb0cded 100644
--- a/packages/kbn-docs-utils/package.json
+++ b/packages/kbn-docs-utils/package.json
@@ -4,7 +4,6 @@
"license": "SSPL-1.0 OR Elastic License 2.0",
"private": "true",
"main": "target_node/index.js",
- "types": "target_types/index.d.ts",
"kibana": {
"devOnly": true
}
diff --git a/packages/kbn-docs-utils/src/api_docs/build_api_docs_cli.ts b/packages/kbn-docs-utils/src/api_docs/build_api_docs_cli.ts
index 2e4ce08540714..3c9137b260a3e 100644
--- a/packages/kbn-docs-utils/src/api_docs/build_api_docs_cli.ts
+++ b/packages/kbn-docs-utils/src/api_docs/build_api_docs_cli.ts
@@ -9,7 +9,8 @@
import Fs from 'fs';
import Path from 'path';
-import { REPO_ROOT, run, CiStatsReporter, createFlagError } from '@kbn/dev-utils';
+import { run, CiStatsReporter, createFlagError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Project } from 'ts-morph';
import { writePluginDocs } from './mdx/write_plugin_mdx_docs';
@@ -241,7 +242,7 @@ export function runBuildApiDocsCli() {
boolean: ['references'],
help: `
--plugin Optionally, run for only a specific plugin
- --stats Optionally print API stats. Must be one or more of: any, comments or exports.
+ --stats Optionally print API stats. Must be one or more of: any, comments or exports.
--references Collect references for API items
`,
},
diff --git a/packages/kbn-docs-utils/src/api_docs/find_plugins.ts b/packages/kbn-docs-utils/src/api_docs/find_plugins.ts
index 78cba3f3a9476..774452a6f1f9f 100644
--- a/packages/kbn-docs-utils/src/api_docs/find_plugins.ts
+++ b/packages/kbn-docs-utils/src/api_docs/find_plugins.ts
@@ -12,7 +12,8 @@ import globby from 'globby';
import loadJsonFile from 'load-json-file';
import { getPluginSearchPaths } from '@kbn/config';
-import { simpleKibanaPlatformPluginDiscovery, REPO_ROOT } from '@kbn/dev-utils';
+import { simpleKibanaPlatformPluginDiscovery } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { ApiScope, PluginOrPackage } from './types';
export function findPlugins(): PluginOrPackage[] {
diff --git a/packages/kbn-es-archiver/BUILD.bazel b/packages/kbn-es-archiver/BUILD.bazel
index 2dc311ed74406..da8aaf913ab67 100644
--- a/packages/kbn-es-archiver/BUILD.bazel
+++ b/packages/kbn-es-archiver/BUILD.bazel
@@ -1,9 +1,10 @@
-load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
-load("//src/dev/bazel:index.bzl", "jsts_transpiler")
+load("@npm//@bazel/typescript:index.bzl", "ts_config")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
PKG_BASE_NAME = "kbn-es-archiver"
PKG_REQUIRE_NAME = "@kbn/es-archiver"
+TYPES_PKG_REQUIRE_NAME = "@types/kbn__es-archiver"
SOURCE_FILES = glob(
[
@@ -43,7 +44,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-test",
"//packages/kbn-utils",
"@npm//@elastic/elasticsearch",
@@ -90,7 +91,7 @@ ts_project(
js_library(
name = PKG_BASE_NAME,
srcs = NPM_MODULE_EXTRA_FILES,
- deps = RUNTIME_DEPS + [":target_node", ":tsc_types"],
+ deps = RUNTIME_DEPS + [":target_node"],
package_name = PKG_REQUIRE_NAME,
visibility = ["//visibility:public"],
)
@@ -109,3 +110,20 @@ filegroup(
],
visibility = ["//visibility:public"],
)
+
+pkg_npm_types(
+ name = "npm_module_types",
+ srcs = SRCS,
+ deps = [":tsc_types"],
+ package_name = TYPES_PKG_REQUIRE_NAME,
+ tsconfig = ":tsconfig",
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "build_types",
+ srcs = [
+ ":npm_module_types",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/packages/kbn-es-archiver/package.json b/packages/kbn-es-archiver/package.json
index 0cce08eaf0352..bff3990a0c1bc 100644
--- a/packages/kbn-es-archiver/package.json
+++ b/packages/kbn-es-archiver/package.json
@@ -4,7 +4,6 @@
"license": "SSPL-1.0 OR Elastic License 2.0",
"private": "true",
"main": "target_node/index.js",
- "types": "target_types/index.d.ts",
"kibana": {
"devOnly": true
}
diff --git a/packages/kbn-es-archiver/src/actions/load.ts b/packages/kbn-es-archiver/src/actions/load.ts
index 0a7235c566b52..c5bea5e29a687 100644
--- a/packages/kbn-es-archiver/src/actions/load.ts
+++ b/packages/kbn-es-archiver/src/actions/load.ts
@@ -9,7 +9,8 @@
import { resolve, relative } from 'path';
import { createReadStream } from 'fs';
import { Readable } from 'stream';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { KbnClient } from '@kbn/test';
import type { Client } from '@elastic/elasticsearch';
import { createPromiseFromStreams, concatStreamProviders } from '@kbn/utils';
diff --git a/packages/kbn-es-archiver/src/actions/rebuild_all.ts b/packages/kbn-es-archiver/src/actions/rebuild_all.ts
index 360fdb438f2db..27fcae0c7cec5 100644
--- a/packages/kbn-es-archiver/src/actions/rebuild_all.ts
+++ b/packages/kbn-es-archiver/src/actions/rebuild_all.ts
@@ -10,8 +10,8 @@ import { resolve, relative } from 'path';
import { Stats, createReadStream, createWriteStream } from 'fs';
import { stat, rename } from 'fs/promises';
import { Readable, Writable } from 'stream';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
-import { createPromiseFromStreams } from '@kbn/utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { createPromiseFromStreams, REPO_ROOT } from '@kbn/utils';
import {
prioritizeMappings,
readDirectory,
diff --git a/packages/kbn-es-archiver/src/actions/save.ts b/packages/kbn-es-archiver/src/actions/save.ts
index 9cb5be05ac060..e5e3f06b8436d 100644
--- a/packages/kbn-es-archiver/src/actions/save.ts
+++ b/packages/kbn-es-archiver/src/actions/save.ts
@@ -10,8 +10,8 @@ import { resolve, relative } from 'path';
import { createWriteStream, mkdirSync } from 'fs';
import { Readable, Writable } from 'stream';
import type { Client } from '@elastic/elasticsearch';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
-import { createListStream, createPromiseFromStreams } from '@kbn/utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { createListStream, createPromiseFromStreams, REPO_ROOT } from '@kbn/utils';
import {
createStats,
diff --git a/packages/kbn-es-archiver/src/actions/unload.ts b/packages/kbn-es-archiver/src/actions/unload.ts
index 1c5f4cd5d7d03..22830b7289174 100644
--- a/packages/kbn-es-archiver/src/actions/unload.ts
+++ b/packages/kbn-es-archiver/src/actions/unload.ts
@@ -10,9 +10,9 @@ import { resolve, relative } from 'path';
import { createReadStream } from 'fs';
import { Readable, Writable } from 'stream';
import type { Client } from '@elastic/elasticsearch';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
import { KbnClient } from '@kbn/test';
-import { createPromiseFromStreams } from '@kbn/utils';
+import { createPromiseFromStreams, REPO_ROOT } from '@kbn/utils';
import {
isGzip,
diff --git a/packages/kbn-es-archiver/src/es_archiver.ts b/packages/kbn-es-archiver/src/es_archiver.ts
index 354197a98fa46..e13e20f25a703 100644
--- a/packages/kbn-es-archiver/src/es_archiver.ts
+++ b/packages/kbn-es-archiver/src/es_archiver.ts
@@ -10,7 +10,8 @@ import Fs from 'fs';
import Path from 'path';
import type { Client } from '@elastic/elasticsearch';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { KbnClient } from '@kbn/test';
import {
diff --git a/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts b/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts
index ae21649690a99..2590074a25411 100644
--- a/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts
+++ b/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts
@@ -6,13 +6,14 @@
* Side Public License, v 1.
*/
+import { ToolingLog } from '@kbn/dev-utils';
+
import {
createListStream,
createPromiseFromStreams,
createConcatStream,
createMapStream,
- ToolingLog,
-} from '@kbn/dev-utils';
+} from '@kbn/utils';
import { createGenerateDocRecordsStream } from './generate_doc_records_stream';
import { Progress } from '../progress';
diff --git a/packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts b/packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts
index bcf28a4976a1c..9c0ff4a8f91ec 100644
--- a/packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts
+++ b/packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts
@@ -6,12 +6,9 @@
* Side Public License, v 1.
*/
-import {
- createListStream,
- createPromiseFromStreams,
- ToolingLog,
- createRecursiveSerializer,
-} from '@kbn/dev-utils';
+import { ToolingLog, createRecursiveSerializer } from '@kbn/dev-utils';
+
+import { createListStream, createPromiseFromStreams } from '@kbn/utils';
import { Progress } from '../progress';
import { createIndexDocRecordsStream } from './index_doc_records_stream';
diff --git a/packages/kbn-es-query/BUILD.bazel b/packages/kbn-es-query/BUILD.bazel
index 70d8d659c99fe..86f3d3ccc13a8 100644
--- a/packages/kbn-es-query/BUILD.bazel
+++ b/packages/kbn-es-query/BUILD.bazel
@@ -1,10 +1,11 @@
-load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
+load("@npm//@bazel/typescript:index.bzl", "ts_config")
load("@npm//peggy:index.bzl", "peggy")
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
-load("//src/dev/bazel:index.bzl", "jsts_transpiler")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
PKG_BASE_NAME = "kbn-es-query"
PKG_REQUIRE_NAME = "@kbn/es-query"
+TYPES_PKG_REQUIRE_NAME = "@types/kbn__es-query"
SOURCE_FILES = glob(
[
@@ -104,7 +105,7 @@ ts_project(
js_library(
name = PKG_BASE_NAME,
srcs = NPM_MODULE_EXTRA_FILES + [":grammar"],
- deps = RUNTIME_DEPS + [":target_node", ":target_web", ":tsc_types"],
+ deps = RUNTIME_DEPS + [":target_node", ":target_web"],
package_name = PKG_REQUIRE_NAME,
visibility = ["//visibility:public"],
)
@@ -123,3 +124,20 @@ filegroup(
],
visibility = ["//visibility:public"],
)
+
+pkg_npm_types(
+ name = "npm_module_types",
+ srcs = SRCS,
+ deps = [":tsc_types"],
+ package_name = TYPES_PKG_REQUIRE_NAME,
+ tsconfig = ":tsconfig",
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "build_types",
+ srcs = [
+ ":npm_module_types",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/packages/kbn-es-query/package.json b/packages/kbn-es-query/package.json
index 335ef61b8b360..b317ce4ca4c95 100644
--- a/packages/kbn-es-query/package.json
+++ b/packages/kbn-es-query/package.json
@@ -2,7 +2,6 @@
"name": "@kbn/es-query",
"browser": "./target_web/index.js",
"main": "./target_node/index.js",
- "types": "./target_types/index.d.ts",
"version": "1.0.0",
"license": "SSPL-1.0 OR Elastic License 2.0",
"private": true
diff --git a/packages/kbn-es-query/src/kuery/index.ts b/packages/kbn-es-query/src/kuery/index.ts
index 868904125dc44..13039956916cb 100644
--- a/packages/kbn-es-query/src/kuery/index.ts
+++ b/packages/kbn-es-query/src/kuery/index.ts
@@ -23,4 +23,5 @@ export const toElasticsearchQuery = (...params: Parameters {
it('should return artifact metadata for the correct architecture', async () => {
const artifact = await Artifact.getSnapshot('oss', MOCK_VERSION, log);
- expect(artifact.getFilename()).toEqual(MOCK_FILENAME + `-${ARCHITECTURE}.oss`);
+ expect(artifact.spec.filename).toEqual(MOCK_FILENAME + `-${ARCHITECTURE}.oss`);
});
});
@@ -182,7 +182,7 @@ describe('Artifact', () => {
describe('with latest unverified snapshot', () => {
beforeEach(() => {
- process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED = 1;
+ process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED = '1';
mockFetch(MOCKS.valid);
});
diff --git a/packages/kbn-es/src/artifact.js b/packages/kbn-es/src/artifact.ts
similarity index 65%
rename from packages/kbn-es/src/artifact.js
rename to packages/kbn-es/src/artifact.ts
index 0fa2c7a1727d0..9c5935c96e8cd 100644
--- a/packages/kbn-es/src/artifact.js
+++ b/packages/kbn-es/src/artifact.ts
@@ -6,25 +6,69 @@
* Side Public License, v 1.
*/
-const fetch = require('node-fetch');
-const AbortController = require('abort-controller');
-const fs = require('fs');
-const { promisify } = require('util');
-const { pipeline, Transform } = require('stream');
-const chalk = require('chalk');
-const { createHash } = require('crypto');
-const path = require('path');
+import fs from 'fs';
+import { promisify } from 'util';
+import path from 'path';
+import { createHash } from 'crypto';
+import { pipeline, Transform } from 'stream';
+import { setTimeout } from 'timers/promises';
+
+import fetch, { Headers } from 'node-fetch';
+import AbortController from 'abort-controller';
+import chalk from 'chalk';
+import { ToolingLog } from '@kbn/dev-utils';
+
+import { cache } from './utils/cache';
+import { resolveCustomSnapshotUrl } from './custom_snapshots';
+import { createCliError, isCliError } from './errors';
const asyncPipeline = promisify(pipeline);
const DAILY_SNAPSHOTS_BASE_URL = 'https://storage.googleapis.com/kibana-ci-es-snapshots-daily';
const PERMANENT_SNAPSHOTS_BASE_URL =
'https://storage.googleapis.com/kibana-ci-es-snapshots-permanent';
-const { cache } = require('./utils');
-const { resolveCustomSnapshotUrl } = require('./custom_snapshots');
-const { createCliError, isCliError } = require('./errors');
+type ChecksumType = 'sha512';
+export type ArtifactLicense = 'oss' | 'basic' | 'trial';
+
+interface ArtifactManifest {
+ id: string;
+ bucket: string;
+ branch: string;
+ sha: string;
+ sha_short: string;
+ version: string;
+ generated: string;
+ archives: Array<{
+ filename: string;
+ checksum: string;
+ url: string;
+ version: string;
+ platform: string;
+ architecture: string;
+ license: string;
+ }>;
+}
+
+export interface ArtifactSpec {
+ url: string;
+ checksumUrl: string;
+ checksumType: ChecksumType;
+ filename: string;
+}
+
+interface ArtifactDownloaded {
+ cached: false;
+ checksum: string;
+ etag?: string;
+ contentLength: number;
+ first500Bytes: Buffer;
+ headers: Headers;
+}
+interface ArtifactCached {
+ cached: true;
+}
-function getChecksumType(checksumUrl) {
+function getChecksumType(checksumUrl: string): ChecksumType {
if (checksumUrl.endsWith('.sha512')) {
return 'sha512';
}
@@ -32,15 +76,18 @@ function getChecksumType(checksumUrl) {
throw new Error(`unable to determine checksum type: ${checksumUrl}`);
}
-function headersToString(headers, indent = '') {
+function headersToString(headers: Headers, indent = '') {
return [...headers.entries()].reduce(
(acc, [key, value]) => `${acc}\n${indent}${key}: ${value}`,
''
);
}
-async function retry(log, fn) {
- async function doAttempt(attempt) {
+async function retry(log: ToolingLog, fn: () => Promise): Promise {
+ let attempt = 0;
+ while (true) {
+ attempt += 1;
+
try {
return await fn();
} catch (error) {
@@ -49,13 +96,10 @@ async function retry(log, fn) {
}
log.warning('...failure, retrying in 5 seconds:', error.message);
- await new Promise((resolve) => setTimeout(resolve, 5000));
+ await setTimeout(5000);
log.info('...retrying');
- return await doAttempt(attempt + 1);
}
}
-
- return await doAttempt(1);
}
// Setting this flag provides an easy way to run the latest un-promoted snapshot without having to look it up
@@ -63,7 +107,7 @@ function shouldUseUnverifiedSnapshot() {
return !!process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED;
}
-async function fetchSnapshotManifest(url, log) {
+async function fetchSnapshotManifest(url: string, log: ToolingLog) {
log.info('Downloading snapshot manifest from %s', chalk.bold(url));
const abc = new AbortController();
@@ -73,7 +117,11 @@ async function fetchSnapshotManifest(url, log) {
return { abc, resp, json };
}
-async function getArtifactSpecForSnapshot(urlVersion, license, log) {
+async function getArtifactSpecForSnapshot(
+ urlVersion: string,
+ license: string,
+ log: ToolingLog
+): Promise {
const desiredVersion = urlVersion.replace('-SNAPSHOT', '');
const desiredLicense = license === 'oss' ? 'oss' : 'default';
@@ -103,17 +151,16 @@ async function getArtifactSpecForSnapshot(urlVersion, license, log) {
throw new Error(`Unable to read snapshot manifest: ${resp.statusText}\n ${json}`);
}
- const manifest = JSON.parse(json);
-
+ const manifest: ArtifactManifest = JSON.parse(json);
const platform = process.platform === 'win32' ? 'windows' : process.platform;
const arch = process.arch === 'arm64' ? 'aarch64' : 'x86_64';
const archive = manifest.archives.find(
- (archive) =>
- archive.version === desiredVersion &&
- archive.platform === platform &&
- archive.license === desiredLicense &&
- archive.architecture === arch
+ (a) =>
+ a.version === desiredVersion &&
+ a.platform === platform &&
+ a.license === desiredLicense &&
+ a.architecture === arch
);
if (!archive) {
@@ -130,93 +177,65 @@ async function getArtifactSpecForSnapshot(urlVersion, license, log) {
};
}
-exports.Artifact = class Artifact {
+export class Artifact {
/**
* Fetch an Artifact from the Artifact API for a license level and version
- * @param {('oss'|'basic'|'trial')} license
- * @param {string} version
- * @param {ToolingLog} log
*/
- static async getSnapshot(license, version, log) {
+ static async getSnapshot(license: ArtifactLicense, version: string, log: ToolingLog) {
const urlVersion = `${encodeURIComponent(version)}-SNAPSHOT`;
const customSnapshotArtifactSpec = resolveCustomSnapshotUrl(urlVersion, license);
if (customSnapshotArtifactSpec) {
- return new Artifact(customSnapshotArtifactSpec, log);
+ return new Artifact(log, customSnapshotArtifactSpec);
}
const artifactSpec = await getArtifactSpecForSnapshot(urlVersion, license, log);
- return new Artifact(artifactSpec, log);
+ return new Artifact(log, artifactSpec);
}
/**
* Fetch an Artifact from the Elasticsearch past releases url
- * @param {string} url
- * @param {ToolingLog} log
*/
- static async getArchive(url, log) {
+ static async getArchive(url: string, log: ToolingLog) {
const shaUrl = `${url}.sha512`;
- const artifactSpec = {
- url: url,
+ return new Artifact(log, {
+ url,
filename: path.basename(url),
checksumUrl: shaUrl,
checksumType: getChecksumType(shaUrl),
- };
-
- return new Artifact(artifactSpec, log);
- }
-
- constructor(spec, log) {
- this._spec = spec;
- this._log = log;
- }
-
- getUrl() {
- return this._spec.url;
- }
-
- getChecksumUrl() {
- return this._spec.checksumUrl;
+ });
}
- getChecksumType() {
- return this._spec.checksumType;
- }
-
- getFilename() {
- return this._spec.filename;
- }
+ constructor(private readonly log: ToolingLog, public readonly spec: ArtifactSpec) {}
/**
* Download the artifact to disk, skips the download if the cache is
* up-to-date, verifies checksum when downloaded
- * @param {string} dest
- * @return {Promise}
*/
- async download(dest, { useCached = false }) {
- await retry(this._log, async () => {
+ async download(dest: string, { useCached = false }: { useCached?: boolean } = {}) {
+ await retry(this.log, async () => {
const cacheMeta = cache.readMeta(dest);
const tmpPath = `${dest}.tmp`;
if (useCached) {
if (cacheMeta.exists) {
- this._log.info(
+ this.log.info(
'use-cached passed, forcing to use existing snapshot',
chalk.bold(cacheMeta.ts)
);
return;
} else {
- this._log.info('use-cached passed but no cached snapshot found. Continuing to download');
+ this.log.info('use-cached passed but no cached snapshot found. Continuing to download');
}
}
- const artifactResp = await this._download(tmpPath, cacheMeta.etag, cacheMeta.ts);
+ const artifactResp = await this.fetchArtifact(tmpPath, cacheMeta.etag, cacheMeta.ts);
if (artifactResp.cached) {
return;
}
- await this._verifyChecksum(artifactResp);
+ await this.verifyChecksum(artifactResp);
// cache the etag for future downloads
cache.writeMeta(dest, { etag: artifactResp.etag });
@@ -228,18 +247,18 @@ exports.Artifact = class Artifact {
/**
* Fetch the artifact with an etag
- * @param {string} tmpPath
- * @param {string} etag
- * @param {string} ts
- * @return {{ cached: true }|{ checksum: string, etag: string, first500Bytes: Buffer }}
*/
- async _download(tmpPath, etag, ts) {
- const url = this.getUrl();
+ private async fetchArtifact(
+ tmpPath: string,
+ etag: string,
+ ts: string
+ ): Promise {
+ const url = this.spec.url;
if (etag) {
- this._log.info('verifying cache of %s', chalk.bold(url));
+ this.log.info('verifying cache of %s', chalk.bold(url));
} else {
- this._log.info('downloading artifact from %s', chalk.bold(url));
+ this.log.info('downloading artifact from %s', chalk.bold(url));
}
const abc = new AbortController();
@@ -251,7 +270,7 @@ exports.Artifact = class Artifact {
});
if (resp.status === 304) {
- this._log.info('etags match, reusing cache from %s', chalk.bold(ts));
+ this.log.info('etags match, reusing cache from %s', chalk.bold(ts));
abc.abort();
return {
@@ -270,10 +289,10 @@ exports.Artifact = class Artifact {
}
if (etag) {
- this._log.info('cache invalid, redownloading');
+ this.log.info('cache invalid, redownloading');
}
- const hash = createHash(this.getChecksumType());
+ const hash = createHash(this.spec.checksumType);
let first500Bytes = Buffer.alloc(0);
let contentLength = 0;
@@ -300,8 +319,9 @@ exports.Artifact = class Artifact {
);
return {
+ cached: false,
checksum: hash.digest('hex'),
- etag: resp.headers.get('etag'),
+ etag: resp.headers.get('etag') ?? undefined,
contentLength,
first500Bytes,
headers: resp.headers,
@@ -310,14 +330,12 @@ exports.Artifact = class Artifact {
/**
* Verify the checksum of the downloaded artifact with the checksum at checksumUrl
- * @param {{ checksum: string, contentLength: number, first500Bytes: Buffer }} artifactResp
- * @return {Promise}
*/
- async _verifyChecksum(artifactResp) {
- this._log.info('downloading artifact checksum from %s', chalk.bold(this.getChecksumUrl()));
+ private async verifyChecksum(artifactResp: ArtifactDownloaded) {
+ this.log.info('downloading artifact checksum from %s', chalk.bold(this.spec.checksumUrl));
const abc = new AbortController();
- const resp = await fetch(this.getChecksumUrl(), {
+ const resp = await fetch(this.spec.checksumUrl, {
signal: abc.signal,
});
@@ -338,7 +356,7 @@ exports.Artifact = class Artifact {
const lenString = `${len} / ${artifactResp.contentLength}`;
throw createCliError(
- `artifact downloaded from ${this.getUrl()} does not match expected checksum\n` +
+ `artifact downloaded from ${this.spec.url} does not match expected checksum\n` +
` expected: ${expectedChecksum}\n` +
` received: ${artifactResp.checksum}\n` +
` headers: ${headersToString(artifactResp.headers, ' ')}\n` +
@@ -346,6 +364,6 @@ exports.Artifact = class Artifact {
);
}
- this._log.info('checksum verified');
+ this.log.info('checksum verified');
}
-};
+}
diff --git a/packages/kbn-es/src/custom_snapshots.js b/packages/kbn-es/src/custom_snapshots.ts
similarity index 82%
rename from packages/kbn-es/src/custom_snapshots.js
rename to packages/kbn-es/src/custom_snapshots.ts
index 9dd8097244947..f3e6d3ecaf857 100644
--- a/packages/kbn-es/src/custom_snapshots.js
+++ b/packages/kbn-es/src/custom_snapshots.ts
@@ -6,13 +6,15 @@
* Side Public License, v 1.
*/
-const { basename } = require('path');
+import Path from 'path';
-function isVersionFlag(a) {
+import type { ArtifactSpec } from './artifact';
+
+function isVersionFlag(a: string) {
return a.startsWith('--version');
}
-function getCustomSnapshotUrl() {
+export function getCustomSnapshotUrl() {
// force use of manually created snapshots until ReindexPutMappings fix
if (
!process.env.ES_SNAPSHOT_MANIFEST &&
@@ -28,7 +30,10 @@ function getCustomSnapshotUrl() {
}
}
-function resolveCustomSnapshotUrl(urlVersion, license) {
+export function resolveCustomSnapshotUrl(
+ urlVersion: string,
+ license: string
+): ArtifactSpec | undefined {
const customSnapshotUrl = getCustomSnapshotUrl();
if (!customSnapshotUrl) {
@@ -48,8 +53,6 @@ function resolveCustomSnapshotUrl(urlVersion, license) {
url: overrideUrl,
checksumUrl: overrideUrl + '.sha512',
checksumType: 'sha512',
- filename: basename(overrideUrl),
+ filename: Path.basename(overrideUrl),
};
}
-
-module.exports = { getCustomSnapshotUrl, resolveCustomSnapshotUrl };
diff --git a/packages/kbn-es/src/errors.ts b/packages/kbn-es/src/errors.ts
new file mode 100644
index 0000000000000..a0c526dc48a9c
--- /dev/null
+++ b/packages/kbn-es/src/errors.ts
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+interface CliError extends Error {
+ isCliError: boolean;
+}
+
+export function createCliError(message: string) {
+ return Object.assign(new Error(message), {
+ isCliError: true,
+ });
+}
+
+function isObj(x: unknown): x is Record {
+ return typeof x === 'object' && x !== null;
+}
+
+export function isCliError(error: unknown): error is CliError {
+ return isObj(error) && error.isCliError === true;
+}
diff --git a/src/plugins/discover/public/utils/get_single_doc_url.ts b/packages/kbn-es/src/index.ts
similarity index 65%
rename from src/plugins/discover/public/utils/get_single_doc_url.ts
rename to packages/kbn-es/src/index.ts
index 913463e6d44a4..68fd931794c0c 100644
--- a/src/plugins/discover/public/utils/get_single_doc_url.ts
+++ b/packages/kbn-es/src/index.ts
@@ -6,6 +6,7 @@
* Side Public License, v 1.
*/
-export const getSingleDocUrl = (indexPatternId: string, rowIndex: string, rowId: string) => {
- return `/app/discover#/doc/${indexPatternId}/${rowIndex}?id=${encodeURIComponent(rowId)}`;
-};
+// @ts-expect-error not typed yet
+export { run } from './cli';
+// @ts-expect-error not typed yet
+export { Cluster } from './cluster';
diff --git a/packages/kbn-es/src/install/index.ts b/packages/kbn-es/src/install/index.ts
new file mode 100644
index 0000000000000..e827dee2247f9
--- /dev/null
+++ b/packages/kbn-es/src/install/index.ts
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export { installArchive } from './install_archive';
+export { installSnapshot, downloadSnapshot } from './install_snapshot';
+export { installSource } from './install_source';
diff --git a/packages/kbn-es/src/install/archive.js b/packages/kbn-es/src/install/install_archive.ts
similarity index 64%
rename from packages/kbn-es/src/install/archive.js
rename to packages/kbn-es/src/install/install_archive.ts
index 76db5a4427e6d..ee04d9e4b62b5 100644
--- a/packages/kbn-es/src/install/archive.js
+++ b/packages/kbn-es/src/install/install_archive.ts
@@ -6,29 +6,40 @@
* Side Public License, v 1.
*/
-const fs = require('fs');
-const path = require('path');
-const chalk = require('chalk');
-const execa = require('execa');
-const del = require('del');
-const url = require('url');
-const { extract } = require('@kbn/dev-utils');
-const { log: defaultLog } = require('../utils');
-const { BASE_PATH, ES_CONFIG, ES_KEYSTORE_BIN } = require('../paths');
-const { Artifact } = require('../artifact');
-const { parseSettings, SettingsFilter } = require('../settings');
+import fs from 'fs';
+import path from 'path';
+
+import chalk from 'chalk';
+import execa from 'execa';
+import del from 'del';
+import { extract, ToolingLog } from '@kbn/dev-utils';
+
+import { BASE_PATH, ES_CONFIG, ES_KEYSTORE_BIN } from '../paths';
+import { Artifact } from '../artifact';
+import { parseSettings, SettingsFilter } from '../settings';
+import { log as defaultLog } from '../utils/log';
+
+interface InstallArchiveOptions {
+ license?: string;
+ password?: string;
+ basePath?: string;
+ installPath?: string;
+ log?: ToolingLog;
+ esArgs?: string[];
+}
+
+const isHttpUrl = (str: string) => {
+ try {
+ return ['http:', 'https:'].includes(new URL(str).protocol);
+ } catch {
+ return false;
+ }
+};
/**
* Extracts an ES archive and optionally installs plugins
- *
- * @param {String} archive - path to tar
- * @param {Object} options
- * @property {('oss'|'basic'|'trial')} options.license
- * @property {String} options.basePath
- * @property {String} options.installPath
- * @property {ToolingLog} options.log
*/
-exports.installArchive = async function installArchive(archive, options = {}) {
+export async function installArchive(archive: string, options: InstallArchiveOptions = {}) {
const {
license = 'basic',
password = 'changeme',
@@ -39,9 +50,9 @@ exports.installArchive = async function installArchive(archive, options = {}) {
} = options;
let dest = archive;
- if (['http:', 'https:'].includes(url.parse(archive).protocol)) {
+ if (isHttpUrl(archive)) {
const artifact = await Artifact.getArchive(archive, log);
- dest = path.resolve(basePath, 'cache', artifact.getFilename());
+ dest = path.resolve(basePath, 'cache', artifact.spec.filename);
await artifact.download(dest);
}
@@ -75,28 +86,23 @@ exports.installArchive = async function installArchive(archive, options = {}) {
}
return { installPath };
-};
+}
/**
* Appends single line to elasticsearch.yml config file
- *
- * @param {String} installPath
- * @param {String} key
- * @param {String} value
*/
-async function appendToConfig(installPath, key, value) {
+async function appendToConfig(installPath: string, key: string, value: string) {
fs.appendFileSync(path.resolve(installPath, ES_CONFIG), `${key}: ${value}\n`, 'utf8');
}
/**
* Creates and configures Keystore
- *
- * @param {String} installPath
- * @param {ToolingLog} log
- * @param {Array<[string, string]>} secureSettings List of custom Elasticsearch secure settings to
- * add into the keystore.
*/
-async function configureKeystore(installPath, log = defaultLog, secureSettings) {
+async function configureKeystore(
+ installPath: string,
+ log: ToolingLog = defaultLog,
+ secureSettings: Array<[string, string]>
+) {
const env = { JAVA_HOME: '' };
await execa(ES_KEYSTORE_BIN, ['create'], { cwd: installPath, env });
diff --git a/packages/kbn-es/src/install/snapshot.js b/packages/kbn-es/src/install/install_snapshot.ts
similarity index 55%
rename from packages/kbn-es/src/install/snapshot.js
rename to packages/kbn-es/src/install/install_snapshot.ts
index cf1ce50f7e413..84d713745eb82 100644
--- a/packages/kbn-es/src/install/snapshot.js
+++ b/packages/kbn-es/src/install/install_snapshot.ts
@@ -6,56 +6,58 @@
* Side Public License, v 1.
*/
-const chalk = require('chalk');
-const path = require('path');
-const { BASE_PATH } = require('../paths');
-const { installArchive } = require('./archive');
-const { log: defaultLog } = require('../utils');
-const { Artifact } = require('../artifact');
+import path from 'path';
+
+import chalk from 'chalk';
+import { ToolingLog } from '@kbn/dev-utils';
+
+import { BASE_PATH } from '../paths';
+import { installArchive } from './install_archive';
+import { log as defaultLog } from '../utils/log';
+import { Artifact, ArtifactLicense } from '../artifact';
+
+interface DownloadSnapshotOptions {
+ version: string;
+ license?: ArtifactLicense;
+ basePath?: string;
+ installPath?: string;
+ log?: ToolingLog;
+ useCached?: boolean;
+}
/**
* Download an ES snapshot
- *
- * @param {Object} options
- * @property {('oss'|'basic'|'trial')} options.license
- * @property {String} options.version
- * @property {String} options.basePath
- * @property {String} options.installPath
- * @property {ToolingLog} options.log
*/
-exports.downloadSnapshot = async function installSnapshot({
+export async function downloadSnapshot({
license = 'basic',
version,
basePath = BASE_PATH,
installPath = path.resolve(basePath, version),
log = defaultLog,
useCached = false,
-}) {
+}: DownloadSnapshotOptions) {
log.info('version: %s', chalk.bold(version));
log.info('install path: %s', chalk.bold(installPath));
log.info('license: %s', chalk.bold(license));
const artifact = await Artifact.getSnapshot(license, version, log);
- const dest = path.resolve(basePath, 'cache', artifact.getFilename());
+ const dest = path.resolve(basePath, 'cache', artifact.spec.filename);
await artifact.download(dest, { useCached });
return {
downloadPath: dest,
};
-};
+}
+
+interface InstallSnapshotOptions extends DownloadSnapshotOptions {
+ password?: string;
+ esArgs?: string[];
+}
/**
* Installs ES from snapshot
- *
- * @param {Object} options
- * @property {('oss'|'basic'|'trial')} options.license
- * @property {String} options.password
- * @property {String} options.version
- * @property {String} options.basePath
- * @property {String} options.installPath
- * @property {ToolingLog} options.log
*/
-exports.installSnapshot = async function installSnapshot({
+export async function installSnapshot({
license = 'basic',
password = 'password',
version,
@@ -64,8 +66,8 @@ exports.installSnapshot = async function installSnapshot({
log = defaultLog,
esArgs,
useCached = false,
-}) {
- const { downloadPath } = await exports.downloadSnapshot({
+}: InstallSnapshotOptions) {
+ const { downloadPath } = await downloadSnapshot({
license,
version,
basePath,
@@ -82,4 +84,4 @@ exports.installSnapshot = async function installSnapshot({
log,
esArgs,
});
-};
+}
diff --git a/packages/kbn-es/src/install/source.js b/packages/kbn-es/src/install/install_source.ts
similarity index 73%
rename from packages/kbn-es/src/install/source.js
rename to packages/kbn-es/src/install/install_source.ts
index 81a1019509906..d8c272677058e 100644
--- a/packages/kbn-es/src/install/source.js
+++ b/packages/kbn-es/src/install/install_source.ts
@@ -6,28 +6,35 @@
* Side Public License, v 1.
*/
-const path = require('path');
-const fs = require('fs');
-const os = require('os');
-const chalk = require('chalk');
-const crypto = require('crypto');
-const simpleGit = require('simple-git/promise');
-const { installArchive } = require('./archive');
-const { log: defaultLog, cache, buildSnapshot, archiveForPlatform } = require('../utils');
-const { BASE_PATH } = require('../paths');
+import path from 'path';
+import fs from 'fs';
+import os from 'os';
+import crypto from 'crypto';
+
+import chalk from 'chalk';
+import simpleGit from 'simple-git/promise';
+import { ToolingLog } from '@kbn/dev-utils';
+
+import { installArchive } from './install_archive';
+import { log as defaultLog } from '../utils/log';
+import { cache } from '../utils/cache';
+import { buildSnapshot, archiveForPlatform } from '../utils/build_snapshot';
+import { BASE_PATH } from '../paths';
+
+interface InstallSourceOptions {
+ sourcePath: string;
+ license?: string;
+ password?: string;
+ basePath?: string;
+ installPath?: string;
+ log?: ToolingLog;
+ esArgs?: string[];
+}
/**
* Installs ES from source
- *
- * @param {Object} options
- * @property {('oss'|'basic'|'trial')} options.license
- * @property {String} options.password
- * @property {String} options.sourcePath
- * @property {String} options.basePath
- * @property {String} options.installPath
- * @property {ToolingLog} options.log
*/
-exports.installSource = async function installSource({
+export async function installSource({
license = 'basic',
password = 'changeme',
sourcePath,
@@ -35,7 +42,7 @@ exports.installSource = async function installSource({
installPath = path.resolve(basePath, 'source'),
log = defaultLog,
esArgs,
-}) {
+}: InstallSourceOptions) {
log.info('source path: %s', chalk.bold(sourcePath));
log.info('install path: %s', chalk.bold(installPath));
log.info('license: %s', chalk.bold(license));
@@ -62,14 +69,9 @@ exports.installSource = async function installSource({
log,
esArgs,
});
-};
+}
-/**
- *
- * @param {String} cwd
- * @param {ToolingLog} log
- */
-async function sourceInfo(cwd, license, log = defaultLog) {
+async function sourceInfo(cwd: string, license: string, log: ToolingLog = defaultLog) {
if (!fs.existsSync(cwd)) {
throw new Error(`${cwd} does not exist`);
}
diff --git a/packages/kbn-es/src/paths.js b/packages/kbn-es/src/paths.js
deleted file mode 100644
index 5c8d3b654ecf9..0000000000000
--- a/packages/kbn-es/src/paths.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-const os = require('os');
-const path = require('path');
-
-function maybeUseBat(bin) {
- return os.platform().startsWith('win') ? `${bin}.bat` : bin;
-}
-
-const tempDir = os.tmpdir();
-
-exports.BASE_PATH = path.resolve(tempDir, 'kbn-es');
-
-exports.GRADLE_BIN = maybeUseBat('./gradlew');
-exports.ES_BIN = maybeUseBat('bin/elasticsearch');
-exports.ES_CONFIG = 'config/elasticsearch.yml';
-
-exports.ES_KEYSTORE_BIN = maybeUseBat('./bin/elasticsearch-keystore');
diff --git a/packages/kbn-es/src/paths.ts b/packages/kbn-es/src/paths.ts
new file mode 100644
index 0000000000000..c1b859af4e1f5
--- /dev/null
+++ b/packages/kbn-es/src/paths.ts
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import Os from 'os';
+import Path from 'path';
+
+function maybeUseBat(bin: string) {
+ return Os.platform().startsWith('win') ? `${bin}.bat` : bin;
+}
+
+const tempDir = Os.tmpdir();
+
+export const BASE_PATH = Path.resolve(tempDir, 'kbn-es');
+
+export const GRADLE_BIN = maybeUseBat('./gradlew');
+export const ES_BIN = maybeUseBat('bin/elasticsearch');
+export const ES_CONFIG = 'config/elasticsearch.yml';
+
+export const ES_KEYSTORE_BIN = maybeUseBat('./bin/elasticsearch-keystore');
diff --git a/packages/kbn-es/src/utils/build_snapshot.js b/packages/kbn-es/src/utils/build_snapshot.ts
similarity index 53%
rename from packages/kbn-es/src/utils/build_snapshot.js
rename to packages/kbn-es/src/utils/build_snapshot.ts
index ec26ba69e658b..542e63dcc0748 100644
--- a/packages/kbn-es/src/utils/build_snapshot.js
+++ b/packages/kbn-es/src/utils/build_snapshot.ts
@@ -6,25 +6,25 @@
* Side Public License, v 1.
*/
-const execa = require('execa');
-const path = require('path');
-const os = require('os');
-const readline = require('readline');
-const { createCliError } = require('../errors');
-const { findMostRecentlyChanged } = require('../utils');
-const { GRADLE_BIN } = require('../paths');
+import path from 'path';
+import os from 'os';
-const onceEvent = (emitter, event) => new Promise((resolve) => emitter.once(event, resolve));
+import { ToolingLog, withProcRunner } from '@kbn/dev-utils';
+
+import { createCliError } from '../errors';
+import { findMostRecentlyChanged } from './find_most_recently_changed';
+import { GRADLE_BIN } from '../paths';
+
+interface BuildSnapshotOptions {
+ license: string;
+ sourcePath: string;
+ log: ToolingLog;
+ platform?: string;
+}
/**
* Creates archive from source
*
- * @param {Object} options
- * @property {('oss'|'basic'|'trial')} options.license
- * @property {String} options.sourcePath
- * @property {ToolingLog} options.log
- * @returns {Object} containing archive and optional plugins
- *
* Gradle tasks:
* $ ./gradlew tasks --all | grep 'distribution.*assemble\s'
* :distribution:archives:darwin-tar:assemble
@@ -34,39 +34,27 @@ const onceEvent = (emitter, event) => new Promise((resolve) => emitter.once(even
* :distribution:archives:oss-linux-tar:assemble
* :distribution:archives:oss-windows-zip:assemble
*/
-exports.buildSnapshot = async ({ license, sourcePath, log, platform = os.platform() }) => {
+export async function buildSnapshot({
+ license,
+ sourcePath,
+ log,
+ platform = os.platform(),
+}: BuildSnapshotOptions) {
const { task, ext } = exports.archiveForPlatform(platform, license);
const buildArgs = [`:distribution:archives:${task}:assemble`];
log.info('%s %s', GRADLE_BIN, buildArgs.join(' '));
log.debug('cwd:', sourcePath);
- const build = execa(GRADLE_BIN, buildArgs, {
- cwd: sourcePath,
- stdio: ['ignore', 'pipe', 'pipe'],
+ await withProcRunner(log, async (procs) => {
+ await procs.run('gradle', {
+ cmd: GRADLE_BIN,
+ args: buildArgs,
+ cwd: sourcePath,
+ wait: true,
+ });
});
- const stdout = readline.createInterface({ input: build.stdout });
- const stderr = readline.createInterface({ input: build.stderr });
-
- stdout.on('line', (line) => log.debug(line));
- stderr.on('line', (line) => log.error(line));
-
- const [exitCode] = await Promise.all([
- Promise.race([
- onceEvent(build, 'exit'),
- onceEvent(build, 'error').then((error) => {
- throw createCliError(`Error spawning gradle: ${error.message}`);
- }),
- ]),
- onceEvent(stdout, 'close'),
- onceEvent(stderr, 'close'),
- ]);
-
- if (exitCode > 0) {
- throw createCliError('unable to build ES');
- }
-
const archivePattern = `distribution/archives/${task}/build/distributions/elasticsearch-*.${ext}`;
const esArchivePath = findMostRecentlyChanged(path.resolve(sourcePath, archivePattern));
@@ -75,9 +63,9 @@ exports.buildSnapshot = async ({ license, sourcePath, log, platform = os.platfor
}
return esArchivePath;
-};
+}
-exports.archiveForPlatform = (platform, license) => {
+export function archiveForPlatform(platform: NodeJS.Platform, license: string) {
const taskPrefix = license === 'oss' ? 'oss-' : '';
switch (platform) {
@@ -88,6 +76,6 @@ exports.archiveForPlatform = (platform, license) => {
case 'linux':
return { format: 'tar', ext: 'tar.gz', task: `${taskPrefix}linux-tar`, platform: 'linux' };
default:
- throw new Error(`unknown platform: ${platform}`);
+ throw new Error(`unsupported platform: ${platform}`);
}
-};
+}
diff --git a/packages/kbn-es/src/utils/cache.js b/packages/kbn-es/src/utils/cache.js
deleted file mode 100644
index 248faf23bbc46..0000000000000
--- a/packages/kbn-es/src/utils/cache.js
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-const fs = require('fs');
-const path = require('path');
-
-exports.readMeta = function readMeta(file) {
- try {
- const meta = fs.readFileSync(`${file}.meta`, {
- encoding: 'utf8',
- });
-
- return {
- exists: fs.existsSync(file),
- ...JSON.parse(meta),
- };
- } catch (e) {
- if (e.code !== 'ENOENT') {
- throw e;
- }
-
- return {
- exists: false,
- };
- }
-};
-
-exports.writeMeta = function readMeta(file, details = {}) {
- const meta = {
- ts: new Date(),
- ...details,
- };
-
- fs.mkdirSync(path.dirname(file), { recursive: true });
- fs.writeFileSync(`${file}.meta`, JSON.stringify(meta, null, 2));
-};
diff --git a/packages/kbn-es/src/utils/cache.ts b/packages/kbn-es/src/utils/cache.ts
new file mode 100644
index 0000000000000..819119b6ce010
--- /dev/null
+++ b/packages/kbn-es/src/utils/cache.ts
@@ -0,0 +1,40 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import Fs from 'fs';
+import Path from 'path';
+
+export const cache = {
+ readMeta(path: string) {
+ try {
+ const meta = Fs.readFileSync(`${path}.meta`, {
+ encoding: 'utf8',
+ });
+
+ return {
+ ...JSON.parse(meta),
+ };
+ } catch (e) {
+ if (e.code !== 'ENOENT') {
+ throw e;
+ }
+
+ return {};
+ }
+ },
+
+ writeMeta(path: string, details = {}) {
+ const meta = {
+ ts: new Date(),
+ ...details,
+ };
+
+ Fs.mkdirSync(Path.dirname(path), { recursive: true });
+ Fs.writeFileSync(`${path}.meta`, JSON.stringify(meta, null, 2));
+ },
+};
diff --git a/packages/kbn-es/src/utils/find_most_recently_changed.test.js b/packages/kbn-es/src/utils/find_most_recently_changed.test.ts
similarity index 93%
rename from packages/kbn-es/src/utils/find_most_recently_changed.test.js
rename to packages/kbn-es/src/utils/find_most_recently_changed.test.ts
index 8198495e7197f..721e5baba7513 100644
--- a/packages/kbn-es/src/utils/find_most_recently_changed.test.js
+++ b/packages/kbn-es/src/utils/find_most_recently_changed.test.ts
@@ -6,6 +6,8 @@
* Side Public License, v 1.
*/
+import { findMostRecentlyChanged } from './find_most_recently_changed';
+
jest.mock('fs', () => ({
statSync: jest.fn().mockImplementation((path) => {
if (path.includes('oldest')) {
@@ -31,8 +33,6 @@ jest.mock('fs', () => ({
}),
}));
-const { findMostRecentlyChanged } = require('./find_most_recently_changed');
-
test('returns newest file', () => {
const file = findMostRecentlyChanged('/data/*.yml');
expect(file).toEqual('/data/newest.yml');
diff --git a/packages/kbn-es/src/utils/find_most_recently_changed.js b/packages/kbn-es/src/utils/find_most_recently_changed.ts
similarity index 65%
rename from packages/kbn-es/src/utils/find_most_recently_changed.js
rename to packages/kbn-es/src/utils/find_most_recently_changed.ts
index 16d300f080b8d..29e1edcc5fcc9 100644
--- a/packages/kbn-es/src/utils/find_most_recently_changed.js
+++ b/packages/kbn-es/src/utils/find_most_recently_changed.ts
@@ -6,25 +6,22 @@
* Side Public License, v 1.
*/
-const path = require('path');
-const fs = require('fs');
-const glob = require('glob');
+import path from 'path';
+import fs from 'fs';
+import glob from 'glob';
/**
* Find the most recently modified file that matches the pattern pattern
- *
- * @param {String} pattern absolute path with glob expressions
- * @return {String} Absolute path
*/
-exports.findMostRecentlyChanged = function findMostRecentlyChanged(pattern) {
+export function findMostRecentlyChanged(pattern: string) {
if (!path.isAbsolute(pattern)) {
throw new TypeError(`Pattern must be absolute, got ${pattern}`);
}
- const ctime = (path) => fs.statSync(path).ctime.getTime();
+ const ctime = (p: string) => fs.statSync(p).ctime.getTime();
return glob
.sync(pattern)
.sort((a, b) => ctime(a) - ctime(b))
.pop();
-};
+}
diff --git a/packages/kbn-es/src/utils/index.js b/packages/kbn-es/src/utils/index.js
deleted file mode 100644
index ed83495e5310a..0000000000000
--- a/packages/kbn-es/src/utils/index.js
+++ /dev/null
@@ -1,16 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-exports.cache = require('./cache');
-exports.log = require('./log').log;
-exports.parseEsLog = require('./parse_es_log').parseEsLog;
-exports.findMostRecentlyChanged = require('./find_most_recently_changed').findMostRecentlyChanged;
-exports.extractConfigFiles = require('./extract_config_files').extractConfigFiles;
-exports.NativeRealm = require('./native_realm').NativeRealm;
-exports.buildSnapshot = require('./build_snapshot').buildSnapshot;
-exports.archiveForPlatform = require('./build_snapshot').archiveForPlatform;
diff --git a/packages/kbn-es/src/utils/index.ts b/packages/kbn-es/src/utils/index.ts
new file mode 100644
index 0000000000000..ce0a222dafd3b
--- /dev/null
+++ b/packages/kbn-es/src/utils/index.ts
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export { cache } from './cache';
+export { log } from './log';
+// @ts-expect-error not typed yet
+export { parseEsLog } from './parse_es_log';
+export { findMostRecentlyChanged } from './find_most_recently_changed';
+// @ts-expect-error not typed yet
+export { extractConfigFiles } from './extract_config_files';
+// @ts-expect-error not typed yet
+export { NativeRealm } from './native_realm';
+export { buildSnapshot } from './build_snapshot';
+export { archiveForPlatform } from './build_snapshot';
diff --git a/packages/kbn-es/src/utils/log.js b/packages/kbn-es/src/utils/log.ts
similarity index 80%
rename from packages/kbn-es/src/utils/log.js
rename to packages/kbn-es/src/utils/log.ts
index b33ae509c6c45..a0299f885cf6a 100644
--- a/packages/kbn-es/src/utils/log.js
+++ b/packages/kbn-es/src/utils/log.ts
@@ -6,11 +6,9 @@
* Side Public License, v 1.
*/
-const { ToolingLog } = require('@kbn/dev-utils');
+import { ToolingLog } from '@kbn/dev-utils';
-const log = new ToolingLog({
+export const log = new ToolingLog({
level: 'verbose',
writeTo: process.stdout,
});
-
-exports.log = log;
diff --git a/packages/kbn-eslint-import-resolver-kibana/BUILD.bazel b/packages/kbn-eslint-import-resolver-kibana/BUILD.bazel
index a4d96f76053e1..759f4ac706471 100644
--- a/packages/kbn-eslint-import-resolver-kibana/BUILD.bazel
+++ b/packages/kbn-eslint-import-resolver-kibana/BUILD.bazel
@@ -1,4 +1,5 @@
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "pkg_npm")
PKG_BASE_NAME = "kbn-eslint-import-resolver-kibana"
PKG_REQUIRE_NAME = "@kbn/eslint-import-resolver-kibana"
diff --git a/packages/kbn-eslint-plugin-eslint/helpers/exports.js b/packages/kbn-eslint-plugin-eslint/helpers/exports.js
index b7af8e83d7661..971364633356c 100644
--- a/packages/kbn-eslint-plugin-eslint/helpers/exports.js
+++ b/packages/kbn-eslint-plugin-eslint/helpers/exports.js
@@ -9,7 +9,7 @@
const Fs = require('fs');
const Path = require('path');
const ts = require('typescript');
-const { REPO_ROOT } = require('@kbn/dev-utils');
+const { REPO_ROOT } = require('@kbn/utils');
const { ExportSet } = require('./export_set');
/** @typedef {import("@typescript-eslint/types").TSESTree.ExportAllDeclaration} ExportAllDeclaration */
diff --git a/packages/kbn-optimizer/BUILD.bazel b/packages/kbn-optimizer/BUILD.bazel
index a389086c9ee3c..3bd41249e2d51 100644
--- a/packages/kbn-optimizer/BUILD.bazel
+++ b/packages/kbn-optimizer/BUILD.bazel
@@ -38,10 +38,12 @@ RUNTIME_DEPS = [
"//packages/kbn-ui-shared-deps-npm",
"//packages/kbn-ui-shared-deps-src",
"//packages/kbn-utils",
+ "@npm//@babel/core",
"@npm//chalk",
"@npm//clean-webpack-plugin",
"@npm//compression-webpack-plugin",
"@npm//cpy",
+ "@npm//dedent",
"@npm//del",
"@npm//execa",
"@npm//jest-diff",
@@ -64,7 +66,7 @@ RUNTIME_DEPS = [
TYPES_DEPS = [
"//packages/kbn-config:npm_module_types",
"//packages/kbn-config-schema:npm_module_types",
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-std",
"//packages/kbn-ui-shared-deps-npm",
"//packages/kbn-ui-shared-deps-src",
@@ -79,7 +81,9 @@ TYPES_DEPS = [
"@npm//pirates",
"@npm//rxjs",
"@npm//zlib",
+ "@npm//@types/babel__core",
"@npm//@types/compression-webpack-plugin",
+ "@npm//@types/dedent",
"@npm//@types/jest",
"@npm//@types/json-stable-stringify",
"@npm//@types/js-yaml",
diff --git a/packages/kbn-optimizer/limits.yml b/packages/kbn-optimizer/limits.yml
index 41c4d3bdd1b35..1de3a8a1b3976 100644
--- a/packages/kbn-optimizer/limits.yml
+++ b/packages/kbn-optimizer/limits.yml
@@ -117,3 +117,4 @@ pageLoadAssetSize:
dataViewManagement: 5000
reporting: 57003
visTypeHeatmap: 25340
+ screenshotting: 17017
diff --git a/packages/kbn-optimizer/src/babel_runtime_helpers/find_babel_runtime_helpers_in_entry_bundles.ts b/packages/kbn-optimizer/src/babel_runtime_helpers/find_babel_runtime_helpers_in_entry_bundles.ts
index f00905f3f4920..c07a9764af76f 100644
--- a/packages/kbn-optimizer/src/babel_runtime_helpers/find_babel_runtime_helpers_in_entry_bundles.ts
+++ b/packages/kbn-optimizer/src/babel_runtime_helpers/find_babel_runtime_helpers_in_entry_bundles.ts
@@ -8,7 +8,8 @@
import Path from 'path';
-import { run, REPO_ROOT } from '@kbn/dev-utils';
+import { run } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { OptimizerConfig } from '../optimizer';
import { parseStats, inAnyEntryChunk } from './parse_stats';
diff --git a/packages/kbn-optimizer/src/node/node_auto_tranpilation.ts b/packages/kbn-optimizer/src/node/node_auto_tranpilation.ts
index 6f5dabf410ffa..2710ba8a54210 100644
--- a/packages/kbn-optimizer/src/node/node_auto_tranpilation.ts
+++ b/packages/kbn-optimizer/src/node/node_auto_tranpilation.ts
@@ -39,7 +39,7 @@ import Crypto from 'crypto';
import * as babel from '@babel/core';
import { addHook } from 'pirates';
-import { REPO_ROOT, UPSTREAM_BRANCH } from '@kbn/dev-utils';
+import { REPO_ROOT, UPSTREAM_BRANCH } from '@kbn/utils';
import sourceMapSupport from 'source-map-support';
import { Cache } from './cache';
diff --git a/packages/kbn-optimizer/src/optimizer/get_changes.test.ts b/packages/kbn-optimizer/src/optimizer/get_changes.test.ts
index d3cc5cceefddf..d1754248dba17 100644
--- a/packages/kbn-optimizer/src/optimizer/get_changes.test.ts
+++ b/packages/kbn-optimizer/src/optimizer/get_changes.test.ts
@@ -9,7 +9,8 @@
jest.mock('execa');
import { getChanges } from './get_changes';
-import { REPO_ROOT, createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
const execa: jest.Mock = jest.requireMock('execa');
diff --git a/packages/kbn-optimizer/src/optimizer/get_changes.ts b/packages/kbn-optimizer/src/optimizer/get_changes.ts
index c5f8abe99c322..b59f938eb8c37 100644
--- a/packages/kbn-optimizer/src/optimizer/get_changes.ts
+++ b/packages/kbn-optimizer/src/optimizer/get_changes.ts
@@ -10,7 +10,7 @@ import Path from 'path';
import execa from 'execa';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
export type Changes = Map<string, 'modified' | 'deleted'>;
diff --git a/packages/kbn-plugin-generator/BUILD.bazel b/packages/kbn-plugin-generator/BUILD.bazel
index c935d1763dae8..488f09bdd5d52 100644
--- a/packages/kbn-plugin-generator/BUILD.bazel
+++ b/packages/kbn-plugin-generator/BUILD.bazel
@@ -51,7 +51,7 @@ RUNTIME_DEPS = [
TYPES_DEPS = [
"//packages/kbn-utils",
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"@npm//del",
"@npm//execa",
"@npm//globby",
diff --git a/packages/kbn-plugin-helpers/BUILD.bazel b/packages/kbn-plugin-helpers/BUILD.bazel
index d7744aecac26e..47f205f1530b7 100644
--- a/packages/kbn-plugin-helpers/BUILD.bazel
+++ b/packages/kbn-plugin-helpers/BUILD.bazel
@@ -42,7 +42,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-optimizer",
"//packages/kbn-utils",
"@npm//del",
diff --git a/packages/kbn-pm/dist/index.js b/packages/kbn-pm/dist/index.js
index c1d0f69e4ea07..fc92d18698132 100644
--- a/packages/kbn-pm/dist/index.js
+++ b/packages/kbn-pm/dist/index.js
@@ -6639,7 +6639,15 @@ class ToolingLogTextWriter {
}
if (this.ignoreSources && msg.source && this.ignoreSources.includes(msg.source)) {
- return false;
+ if (msg.type === 'write') {
+ const txt = (0, _util.format)(msg.args[0], ...msg.args.slice(1)); // Ensure that Elasticsearch deprecation log messages from Kibana aren't ignored
+
+ if (!/elasticsearch\.deprecation/.test(txt)) {
+ return false;
+ }
+ } else {
+ return false;
+ }
}
const prefix = has(MSG_PREFIXES, msg.type) ? MSG_PREFIXES[msg.type] : '';
diff --git a/packages/kbn-rule-data-utils/BUILD.bazel b/packages/kbn-rule-data-utils/BUILD.bazel
index 730e907aafc65..d23cf25f181ca 100644
--- a/packages/kbn-rule-data-utils/BUILD.bazel
+++ b/packages/kbn-rule-data-utils/BUILD.bazel
@@ -34,7 +34,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-es-query",
+ "//packages/kbn-es-query:npm_module_types",
"@npm//@elastic/elasticsearch",
"@npm//tslib",
"@npm//utility-types",
diff --git a/packages/kbn-rule-data-utils/src/technical_field_names.ts b/packages/kbn-rule-data-utils/src/technical_field_names.ts
index 349719c019c22..fde8deade36b5 100644
--- a/packages/kbn-rule-data-utils/src/technical_field_names.ts
+++ b/packages/kbn-rule-data-utils/src/technical_field_names.ts
@@ -24,6 +24,7 @@ const VERSION = `${KIBANA_NAMESPACE}.version` as const;
// Fields pertaining to the alert
const ALERT_ACTION_GROUP = `${ALERT_NAMESPACE}.action_group` as const;
+const ALERT_BUILDING_BLOCK_TYPE = `${ALERT_NAMESPACE}.building_block_type` as const;
const ALERT_DURATION = `${ALERT_NAMESPACE}.duration.us` as const;
const ALERT_END = `${ALERT_NAMESPACE}.end` as const;
const ALERT_EVALUATION_THRESHOLD = `${ALERT_NAMESPACE}.evaluation.threshold` as const;
@@ -91,6 +92,7 @@ const fields = {
TAGS,
TIMESTAMP,
ALERT_ACTION_GROUP,
+ ALERT_BUILDING_BLOCK_TYPE,
ALERT_DURATION,
ALERT_END,
ALERT_EVALUATION_THRESHOLD,
@@ -141,6 +143,7 @@ const fields = {
export {
ALERT_ACTION_GROUP,
+ ALERT_BUILDING_BLOCK_TYPE,
ALERT_DURATION,
ALERT_END,
ALERT_EVALUATION_THRESHOLD,
diff --git a/packages/kbn-securitysolution-autocomplete/BUILD.bazel b/packages/kbn-securitysolution-autocomplete/BUILD.bazel
index 57ac8c62273e0..50df292b8796e 100644
--- a/packages/kbn-securitysolution-autocomplete/BUILD.bazel
+++ b/packages/kbn-securitysolution-autocomplete/BUILD.bazel
@@ -45,7 +45,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-es-query",
+ "//packages/kbn-es-query:npm_module_types",
"//packages/kbn-i18n",
"//packages/kbn-securitysolution-list-hooks",
"//packages/kbn-securitysolution-list-utils",
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.mock.ts b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.mock.ts
index e491b50b0f9c8..176a6357b30e7 100644
--- a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.mock.ts
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.mock.ts
@@ -10,9 +10,11 @@ import { EndpointEntriesArray } from '.';
import { getEndpointEntryMatchMock } from '../entry_match/index.mock';
import { getEndpointEntryMatchAnyMock } from '../entry_match_any/index.mock';
import { getEndpointEntryNestedMock } from '../entry_nested/index.mock';
+import { getEndpointEntryMatchWildcard } from '../entry_match_wildcard/index.mock';
export const getEndpointEntriesArrayMock = (): EndpointEntriesArray => [
getEndpointEntryMatchMock(),
getEndpointEntryMatchAnyMock(),
getEndpointEntryNestedMock(),
+ getEndpointEntryMatchWildcard(),
];
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.test.ts b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.test.ts
index 09f1740567bc1..ca852e15c5c2a 100644
--- a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.test.ts
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.test.ts
@@ -20,6 +20,7 @@ import { getEndpointEntryNestedMock } from '../entry_nested/index.mock';
import { getEndpointEntriesArrayMock } from './index.mock';
import { getEntryListMock } from '../../entries_list/index.mock';
import { getEntryExistsMock } from '../../entries_exist/index.mock';
+import { getEndpointEntryMatchWildcard } from '../entry_match_wildcard/index.mock';
describe('Endpoint', () => {
describe('entriesArray', () => {
@@ -99,6 +100,15 @@ describe('Endpoint', () => {
expect(message.schema).toEqual(payload);
});
+ test('it should validate an array with wildcard entry', () => {
+ const payload = [getEndpointEntryMatchWildcard()];
+ const decoded = endpointEntriesArray.decode(payload);
+ const message = pipe(decoded, foldLeftRight);
+
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(payload);
+ });
+
test('it should validate an array with all types of entries', () => {
const payload = getEndpointEntriesArrayMock();
const decoded = endpointEntriesArray.decode(payload);
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.ts
index 451131dafc459..58b0d80f9c1fa 100644
--- a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.ts
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.ts
@@ -11,9 +11,15 @@ import { Either } from 'fp-ts/lib/Either';
import { endpointEntryMatch } from '../entry_match';
import { endpointEntryMatchAny } from '../entry_match_any';
import { endpointEntryNested } from '../entry_nested';
+import { endpointEntryMatchWildcard } from '../entry_match_wildcard';
export const endpointEntriesArray = t.array(
- t.union([endpointEntryMatch, endpointEntryMatchAny, endpointEntryNested])
+ t.union([
+ endpointEntryMatch,
+ endpointEntryMatchAny,
+ endpointEntryMatchWildcard,
+ endpointEntryNested,
+ ])
);
export type EndpointEntriesArray = t.TypeOf<typeof endpointEntriesArray>;
diff --git a/packages/kbn-es/src/install/index.js b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entry_match_wildcard/index.mock.ts
similarity index 53%
rename from packages/kbn-es/src/install/index.js
rename to packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entry_match_wildcard/index.mock.ts
index 07582f73c663a..e001552277e0c 100644
--- a/packages/kbn-es/src/install/index.js
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entry_match_wildcard/index.mock.ts
@@ -6,7 +6,12 @@
* Side Public License, v 1.
*/
-exports.installArchive = require('./archive').installArchive;
-exports.installSnapshot = require('./snapshot').installSnapshot;
-exports.downloadSnapshot = require('./snapshot').downloadSnapshot;
-exports.installSource = require('./source').installSource;
+import { ENTRY_VALUE, FIELD, OPERATOR, WILDCARD } from '../../../constants/index.mock';
+import { EndpointEntryMatchWildcard } from './index';
+
+export const getEndpointEntryMatchWildcard = (): EndpointEntryMatchWildcard => ({
+ field: FIELD,
+ operator: OPERATOR,
+ type: WILDCARD,
+ value: ENTRY_VALUE,
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.mock.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.mock.ts
new file mode 100644
index 0000000000000..03ec225351e6d
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.mock.ts
@@ -0,0 +1,34 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { ENTRIES } from '../../constants/index.mock';
+import { ImportExceptionListItemSchema, ImportExceptionListItemSchemaDecoded } from '.';
+
+export const getImportExceptionsListItemSchemaMock = (
+ itemId = 'item_id_1',
+ listId = 'detection_list_id'
+): ImportExceptionListItemSchema => ({
+ description: 'some description',
+ entries: ENTRIES,
+ item_id: itemId,
+ list_id: listId,
+ name: 'Query with a rule id',
+ type: 'simple',
+});
+
+export const getImportExceptionsListItemSchemaDecodedMock = (
+ itemId = 'item_id_1',
+ listId = 'detection_list_id'
+): ImportExceptionListItemSchemaDecoded => ({
+ ...getImportExceptionsListItemSchemaMock(itemId, listId),
+ comments: [],
+ meta: undefined,
+ namespace_type: 'single',
+ os_types: [],
+ tags: [],
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.test.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.test.ts
new file mode 100644
index 0000000000000..d202f65b57ab5
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.test.ts
@@ -0,0 +1,143 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { left } from 'fp-ts/lib/Either';
+import { pipe } from 'fp-ts/lib/pipeable';
+import { exactCheck, foldLeftRight, getPaths } from '@kbn/securitysolution-io-ts-utils';
+
+import { importExceptionListItemSchema, ImportExceptionListItemSchema } from '.';
+import {
+ getImportExceptionsListItemSchemaDecodedMock,
+ getImportExceptionsListItemSchemaMock,
+} from './index.mock';
+
+describe('import_list_item_schema', () => {
+ test('it should validate a typical item request', () => {
+ const payload = getImportExceptionsListItemSchemaMock();
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(getImportExceptionsListItemSchemaDecodedMock());
+ });
+
+ test('it should NOT accept an undefined for "item_id"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.item_id;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "item_id"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "list_id"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.list_id;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "list_id"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "description"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.description;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "description"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "name"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.name;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "name"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "type"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.type;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "type"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "entries"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.entries;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "entries"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should accept any partial fields', () => {
+ const payload: ImportExceptionListItemSchema = {
+ ...getImportExceptionsListItemSchemaMock(),
+ id: '123',
+ namespace_type: 'single',
+ comments: [],
+ os_types: [],
+ tags: ['123'],
+ created_at: '2018-08-24T17:49:30.145142000',
+ created_by: 'elastic',
+ updated_at: '2018-08-24T17:49:30.145142000',
+ updated_by: 'elastic',
+ tie_breaker_id: '123',
+ _version: '3',
+ meta: undefined,
+ };
+
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(payload);
+ });
+
+ test('it should not allow an extra key to be sent in', () => {
+ const payload: ImportExceptionListItemSchema & {
+ extraKey?: string;
+ } = getImportExceptionsListItemSchemaMock();
+ payload.extraKey = 'some new value';
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual(['invalid keys "extraKey"']);
+ expect(message.schema).toEqual({});
+ });
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.ts
new file mode 100644
index 0000000000000..3da30a21a0115
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.ts
@@ -0,0 +1,87 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import * as t from 'io-ts';
+
+import { OsTypeArray, osTypeArrayOrUndefined } from '../../common/os_type';
+import { Tags } from '../../common/tags';
+import { NamespaceType } from '../../common/default_namespace';
+import { name } from '../../common/name';
+import { description } from '../../common/description';
+import { namespace_type } from '../../common/namespace_type';
+import { tags } from '../../common/tags';
+import { meta } from '../../common/meta';
+import { list_id } from '../../common/list_id';
+import { item_id } from '../../common/item_id';
+import { id } from '../../common/id';
+import { created_at } from '../../common/created_at';
+import { created_by } from '../../common/created_by';
+import { updated_at } from '../../common/updated_at';
+import { updated_by } from '../../common/updated_by';
+import { _version } from '../../common/underscore_version';
+import { tie_breaker_id } from '../../common/tie_breaker_id';
+import { nonEmptyEntriesArray } from '../../common/non_empty_entries_array';
+import { exceptionListItemType } from '../../common/exception_list_item_type';
+import { ItemId } from '../../common/item_id';
+import { EntriesArray } from '../../common/entries';
+import { CreateCommentsArray } from '../../common/create_comment';
+import { DefaultCreateCommentsArray } from '../../common/default_create_comments_array';
+
+/**
+ * Differences from this and the createExceptionsListItemSchema are
+ * - item_id is required
+ * - id is optional (but ignored in the import code - item_id is exclusively used for imports)
+ * - immutable is optional but if it is any value other than false it will be rejected
+ * - created_at is optional (but ignored in the import code)
+ * - updated_at is optional (but ignored in the import code)
+ * - created_by is optional (but ignored in the import code)
+ * - updated_by is optional (but ignored in the import code)
+ */
+export const importExceptionListItemSchema = t.intersection([
+ t.exact(
+ t.type({
+ description,
+ entries: nonEmptyEntriesArray,
+ item_id,
+ list_id,
+ name,
+ type: exceptionListItemType,
+ })
+ ),
+ t.exact(
+ t.partial({
+ id, // defaults to undefined if not set during decode
+ comments: DefaultCreateCommentsArray, // defaults to empty array if not set during decode
+ created_at, // defaults undefined if not set during decode
+ updated_at, // defaults undefined if not set during decode
+ created_by, // defaults undefined if not set during decode
+ updated_by, // defaults undefined if not set during decode
+ _version, // defaults to undefined if not set during decode
+ tie_breaker_id,
+ meta, // defaults to undefined if not set during decode
+ namespace_type, // defaults to 'single' if not set during decode
+ os_types: osTypeArrayOrUndefined, // defaults to empty array if not set during decode
+ tags, // defaults to empty array if not set during decode
+ })
+ ),
+]);
+
+export type ImportExceptionListItemSchema = t.OutputOf<typeof importExceptionListItemSchema>;
+
+// This type is used after a decode since some things are defaults after a decode.
+export type ImportExceptionListItemSchemaDecoded = Omit<
+ ImportExceptionListItemSchema,
+ 'tags' | 'item_id' | 'entries' | 'namespace_type' | 'comments'
+> & {
+ comments: CreateCommentsArray;
+ tags: Tags;
+ item_id: ItemId;
+ entries: EntriesArray;
+ namespace_type: NamespaceType;
+ os_types: OsTypeArray;
+};
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.mock.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.mock.ts
new file mode 100644
index 0000000000000..dc6aa8644c1f5
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.mock.ts
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { ImportExceptionListSchemaDecoded, ImportExceptionsListSchema } from '.';
+
+export const getImportExceptionsListSchemaMock = (
+ listId = 'detection_list_id'
+): ImportExceptionsListSchema => ({
+ description: 'some description',
+ list_id: listId,
+ name: 'Query with a rule id',
+ type: 'detection',
+});
+
+export const getImportExceptionsListSchemaDecodedMock = (
+ listId = 'detection_list_id'
+): ImportExceptionListSchemaDecoded => ({
+ ...getImportExceptionsListSchemaMock(listId),
+ immutable: false,
+ meta: undefined,
+ namespace_type: 'single',
+ os_types: [],
+ tags: [],
+ version: 1,
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.test.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.test.ts
new file mode 100644
index 0000000000000..92a24cd4352f5
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.test.ts
@@ -0,0 +1,132 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { left } from 'fp-ts/lib/Either';
+import { pipe } from 'fp-ts/lib/pipeable';
+import { exactCheck, foldLeftRight, getPaths } from '@kbn/securitysolution-io-ts-utils';
+
+import { importExceptionsListSchema, ImportExceptionsListSchema } from '.';
+import {
+ getImportExceptionsListSchemaMock,
+ getImportExceptionsListSchemaDecodedMock,
+} from './index.mock';
+
+describe('import_list_item_schema', () => {
+ test('it should validate a typical lists request', () => {
+ const payload = getImportExceptionsListSchemaMock();
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(getImportExceptionsListSchemaDecodedMock());
+ });
+
+ test('it should NOT accept an undefined for "list_id"', () => {
+ const payload: Partial<ImportExceptionsListSchema> =
+ getImportExceptionsListSchemaMock();
+ delete payload.list_id;
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "list_id"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "description"', () => {
+ const payload: Partial<ImportExceptionsListSchema> =
+ getImportExceptionsListSchemaMock();
+ delete payload.description;
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "description"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "name"', () => {
+ const payload: Partial<ImportExceptionsListSchema> =
+ getImportExceptionsListSchemaMock();
+ delete payload.name;
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "name"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "type"', () => {
+ const payload: Partial<ImportExceptionsListSchema> =
+ getImportExceptionsListSchemaMock();
+ delete payload.type;
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "type"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept value of "true" for "immutable"', () => {
+ const payload: ImportExceptionsListSchema = {
+ ...getImportExceptionsListSchemaMock(),
+ immutable: true,
+ };
+
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "true" supplied to "immutable"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should accept any partial fields', () => {
+ const payload: ImportExceptionsListSchema = {
+ ...getImportExceptionsListSchemaMock(),
+ namespace_type: 'single',
+ immutable: false,
+ os_types: [],
+ tags: ['123'],
+ created_at: '2018-08-24T17:49:30.145142000',
+ created_by: 'elastic',
+ updated_at: '2018-08-24T17:49:30.145142000',
+ updated_by: 'elastic',
+ version: 3,
+ tie_breaker_id: '123',
+ _version: '3',
+ meta: undefined,
+ };
+
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(payload);
+ });
+
+ test('it should not allow an extra key to be sent in', () => {
+ const payload: ImportExceptionsListSchema & {
+ extraKey?: string;
+ } = getImportExceptionsListSchemaMock();
+ payload.extraKey = 'some new value';
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual(['invalid keys "extraKey"']);
+ expect(message.schema).toEqual({});
+ });
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.ts
new file mode 100644
index 0000000000000..610bbae97f579
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.ts
@@ -0,0 +1,87 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import * as t from 'io-ts';
+
+import {
+ DefaultVersionNumber,
+ DefaultVersionNumberDecoded,
+ OnlyFalseAllowed,
+} from '@kbn/securitysolution-io-ts-types';
+
+import { exceptionListType } from '../../common/exception_list';
+import { OsTypeArray, osTypeArrayOrUndefined } from '../../common/os_type';
+import { Tags } from '../../common/tags';
+import { ListId } from '../../common/list_id';
+import { NamespaceType } from '../../common/default_namespace';
+import { name } from '../../common/name';
+import { description } from '../../common/description';
+import { namespace_type } from '../../common/namespace_type';
+import { tags } from '../../common/tags';
+import { meta } from '../../common/meta';
+import { list_id } from '../../common/list_id';
+import { id } from '../../common/id';
+import { created_at } from '../../common/created_at';
+import { created_by } from '../../common/created_by';
+import { updated_at } from '../../common/updated_at';
+import { updated_by } from '../../common/updated_by';
+import { _version } from '../../common/underscore_version';
+import { tie_breaker_id } from '../../common/tie_breaker_id';
+
+/**
+ * Differences from this and the createExceptionsSchema are
+ * - list_id is required
+ * - id is optional (but ignored in the import code - list_id is exclusively used for imports)
+ * - immutable is optional but if it is any value other than false it will be rejected
+ * - created_at is optional (but ignored in the import code)
+ * - updated_at is optional (but ignored in the import code)
+ * - created_by is optional (but ignored in the import code)
+ * - updated_by is optional (but ignored in the import code)
+ */
+export const importExceptionsListSchema = t.intersection([
+ t.exact(
+ t.type({
+ description,
+ name,
+ type: exceptionListType,
+ list_id,
+ })
+ ),
+ t.exact(
+ t.partial({
+ id, // defaults to undefined if not set during decode
+ immutable: OnlyFalseAllowed,
+ meta, // defaults to undefined if not set during decode
+ namespace_type, // defaults to 'single' if not set during decode
+ os_types: osTypeArrayOrUndefined, // defaults to empty array if not set during decode
+ tags, // defaults to empty array if not set during decode
+ created_at, // defaults "undefined" if not set during decode
+ updated_at, // defaults "undefined" if not set during decode
+ created_by, // defaults "undefined" if not set during decode
+ updated_by, // defaults "undefined" if not set during decode
+ _version, // defaults to undefined if not set during decode
+ tie_breaker_id,
+ version: DefaultVersionNumber, // defaults to numerical 1 if not set during decode
+ })
+ ),
+]);
+
+export type ImportExceptionsListSchema = t.TypeOf<typeof importExceptionsListSchema>;
+
+// This type is used after a decode since some things are defaults after a decode.
+export type ImportExceptionListSchemaDecoded = Omit<
+ ImportExceptionsListSchema,
+ 'tags' | 'list_id' | 'namespace_type' | 'os_types' | 'immutable'
+> & {
+ immutable: false;
+ tags: Tags;
+ list_id: ListId;
+ namespace_type: NamespaceType;
+ os_types: OsTypeArray;
+ version: DefaultVersionNumberDecoded;
+};
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/index.ts
index 3d3c41aed5a72..da8bd7ed8306e 100644
--- a/packages/kbn-securitysolution-io-ts-list-types/src/request/index.ts
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/index.ts
@@ -23,6 +23,8 @@ export * from './find_exception_list_item_schema';
export * from './find_list_item_schema';
export * from './find_list_schema';
export * from './import_list_item_query_schema';
+export * from './import_exception_list_schema';
+export * from './import_exception_item_schema';
export * from './import_list_item_schema';
export * from './patch_list_item_schema';
export * from './patch_list_schema';
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.mock.ts b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.mock.ts
new file mode 100644
index 0000000000000..d4c17c7f9422e
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.mock.ts
@@ -0,0 +1,23 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { ImportExceptionsResponseSchema } from '.';
+
+export const getImportExceptionsResponseSchemaMock = (
+ success = 0,
+ lists = 0,
+ items = 0
+): ImportExceptionsResponseSchema => ({
+ errors: [],
+ success: true,
+ success_count: success,
+ success_exception_lists: true,
+ success_count_exception_lists: lists,
+ success_exception_list_items: true,
+ success_count_exception_list_items: items,
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.test.ts b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.test.ts
new file mode 100644
index 0000000000000..dc6780d4b1ce2
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.test.ts
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { left } from 'fp-ts/lib/Either';
+import { pipe } from 'fp-ts/lib/pipeable';
+import { exactCheck, foldLeftRight, getPaths } from '@kbn/securitysolution-io-ts-utils';
+
+import { importExceptionsResponseSchema, ImportExceptionsResponseSchema } from '.';
+import { getImportExceptionsResponseSchemaMock } from './index.mock';
+
+describe('importExceptionsResponseSchema', () => {
+ test('it should validate a typical exceptions import response', () => {
+ const payload = getImportExceptionsResponseSchemaMock();
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(payload);
+ });
+
+ test('it should NOT accept an undefined for "errors"', () => {
+ const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.errors;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "errors"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success"', () => {
+ const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success_count"', () => {
+ const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success_count;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success_count"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success_exception_lists"', () => {
+ const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success_exception_lists;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success_exception_lists"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success_count_exception_lists"', () => {
+ const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success_count_exception_lists;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success_count_exception_lists"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success_exception_list_items"', () => {
+ const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success_exception_list_items;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success_exception_list_items"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success_count_exception_list_items"', () => {
+ const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success_count_exception_list_items;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success_count_exception_list_items"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should not allow an extra key to be sent in', () => {
+ const payload: ImportExceptionsResponseSchema & {
+ extraKey?: string;
+ } = getImportExceptionsResponseSchemaMock();
+ payload.extraKey = 'some new value';
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual(['invalid keys "extraKey"']);
+ expect(message.schema).toEqual({});
+ });
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.ts
new file mode 100644
index 0000000000000..f50356d2789f8
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.ts
@@ -0,0 +1,51 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import * as t from 'io-ts';
+
+import { PositiveInteger } from '@kbn/securitysolution-io-ts-types';
+
+import { id } from '../../common/id';
+import { list_id } from '../../common/list_id';
+import { item_id } from '../../common/item_id';
+
+export const bulkErrorErrorSchema = t.exact(
+ t.type({
+ status_code: t.number,
+ message: t.string,
+ })
+);
+
+export const bulkErrorSchema = t.intersection([
+ t.exact(
+ t.type({
+ error: bulkErrorErrorSchema,
+ })
+ ),
+ t.partial({
+ id,
+ list_id,
+ item_id,
+ }),
+]);
+
+export type BulkErrorSchema = t.TypeOf<typeof bulkErrorSchema>;
+
+export const importExceptionsResponseSchema = t.exact(
+ t.type({
+ errors: t.array(bulkErrorSchema),
+ success: t.boolean,
+ success_count: PositiveInteger,
+ success_exception_lists: t.boolean,
+ success_count_exception_lists: PositiveInteger,
+ success_exception_list_items: t.boolean,
+ success_count_exception_list_items: PositiveInteger,
+ })
+);
+
+export type ImportExceptionsResponseSchema = t.TypeOf<typeof importExceptionsResponseSchema>;
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/response/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/response/index.ts
index dc29bdf16ab48..c37b092eb3477 100644
--- a/packages/kbn-securitysolution-io-ts-list-types/src/response/index.ts
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/response/index.ts
@@ -14,6 +14,7 @@ export * from './found_exception_list_item_schema';
export * from './found_exception_list_schema';
export * from './found_list_item_schema';
export * from './found_list_schema';
+export * from './import_exceptions_schema';
export * from './list_item_schema';
export * from './list_schema';
export * from './exception_list_summary_schema';
diff --git a/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.test.ts b/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.test.ts
new file mode 100644
index 0000000000000..03ec9df51a318
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.test.ts
@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { left } from 'fp-ts/lib/Either';
+import { ImportQuerySchema, importQuerySchema } from '.';
+import { exactCheck, foldLeftRight, getPaths } from '@kbn/securitysolution-io-ts-utils';
+
+describe('importQuerySchema', () => {
+ test('it should validate proper schema', () => {
+ const payload = {
+ overwrite: true,
+ };
+ const decoded = importQuerySchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = foldLeftRight(checked);
+
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(payload);
+ });
+
+ test('it should NOT validate a non boolean value for "overwrite"', () => {
+ const payload: Omit<ImportQuerySchema, 'overwrite'> & { overwrite: string } = {
+ overwrite: 'wrong',
+ };
+ const decoded = importQuerySchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = foldLeftRight(checked);
+
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "wrong" supplied to "overwrite"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT allow an extra key to be sent in', () => {
+ const payload: ImportQuerySchema & {
+ extraKey?: string;
+ } = {
+ extraKey: 'extra',
+ overwrite: true,
+ };
+
+ const decoded = importQuerySchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = foldLeftRight(checked);
+ expect(getPaths(left(message.errors))).toEqual(['invalid keys "extraKey"']);
+ expect(message.schema).toEqual({});
+ });
+});
diff --git a/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.ts b/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.ts
new file mode 100644
index 0000000000000..95cbf96b2ef8d
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.ts
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import * as t from 'io-ts';
+
+import { DefaultStringBooleanFalse } from '../default_string_boolean_false';
+
+export const importQuerySchema = t.exact(
+ t.partial({
+ overwrite: DefaultStringBooleanFalse,
+ })
+);
+
+export type ImportQuerySchema = t.TypeOf<typeof importQuerySchema>;
+export type ImportQuerySchemaDecoded = Omit<ImportQuerySchema, 'overwrite'> & {
+ overwrite: boolean;
+};
diff --git a/packages/kbn-securitysolution-io-ts-types/src/index.ts b/packages/kbn-securitysolution-io-ts-types/src/index.ts
index b85bff63fe2a7..0bb99e4c766e7 100644
--- a/packages/kbn-securitysolution-io-ts-types/src/index.ts
+++ b/packages/kbn-securitysolution-io-ts-types/src/index.ts
@@ -17,6 +17,7 @@ export * from './default_version_number';
export * from './empty_string_array';
export * from './enumeration';
export * from './iso_date_string';
+export * from './import_query_schema';
export * from './non_empty_array';
export * from './non_empty_or_nullable_string_array';
export * from './non_empty_string_array';
diff --git a/packages/kbn-securitysolution-list-utils/BUILD.bazel b/packages/kbn-securitysolution-list-utils/BUILD.bazel
index eb33eb1a03b66..30568ca725041 100644
--- a/packages/kbn-securitysolution-list-utils/BUILD.bazel
+++ b/packages/kbn-securitysolution-list-utils/BUILD.bazel
@@ -38,11 +38,12 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-es-query",
- "//packages/kbn-i18n",
+ "//packages/kbn-es-query:npm_module_types",
+ "//packages/kbn-i18n:npm_module_types",
"//packages/kbn-securitysolution-io-ts-list-types",
"//packages/kbn-securitysolution-list-constants",
"//packages/kbn-securitysolution-utils",
+ "@npm//@elastic/elasticsearch",
"@npm//@types/jest",
"@npm//@types/lodash",
"@npm//@types/node",
diff --git a/packages/kbn-storybook/BUILD.bazel b/packages/kbn-storybook/BUILD.bazel
index f2a7bf25fb407..5dbe22b56c63f 100644
--- a/packages/kbn-storybook/BUILD.bazel
+++ b/packages/kbn-storybook/BUILD.bazel
@@ -32,6 +32,7 @@ RUNTIME_DEPS = [
"//packages/kbn-dev-utils",
"//packages/kbn-ui-shared-deps-npm",
"//packages/kbn-ui-shared-deps-src",
+ "//packages/kbn-utils",
"@npm//@storybook/addons",
"@npm//@storybook/api",
"@npm//@storybook/components",
@@ -47,9 +48,10 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-ui-shared-deps-npm",
"//packages/kbn-ui-shared-deps-src",
+ "//packages/kbn-utils",
"@npm//@storybook/addons",
"@npm//@storybook/api",
"@npm//@storybook/components",
diff --git a/packages/kbn-storybook/src/lib/constants.ts b/packages/kbn-storybook/src/lib/constants.ts
index 722f789fde786..69b05c94ea1b0 100644
--- a/packages/kbn-storybook/src/lib/constants.ts
+++ b/packages/kbn-storybook/src/lib/constants.ts
@@ -7,7 +7,7 @@
*/
import { resolve } from 'path';
-import { REPO_ROOT as KIBANA_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT as KIBANA_ROOT } from '@kbn/utils';
export const REPO_ROOT = KIBANA_ROOT;
export const ASSET_DIR = resolve(KIBANA_ROOT, 'built_assets/storybook');
diff --git a/packages/kbn-storybook/src/lib/theme_switcher.tsx b/packages/kbn-storybook/src/lib/theme_switcher.tsx
index 3d6f7999545a0..8cc805ee2e494 100644
--- a/packages/kbn-storybook/src/lib/theme_switcher.tsx
+++ b/packages/kbn-storybook/src/lib/theme_switcher.tsx
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import React from 'react';
+import React, { useCallback, useEffect } from 'react';
import { Icons, IconButton, TooltipLinkList, WithTooltip } from '@storybook/components';
import { useGlobals } from '@storybook/api';
@@ -17,14 +17,52 @@ type Link = ArrayItem['links']>;
const defaultTheme = 'v8.light';
export function ThemeSwitcher() {
- const [globals, updateGlobals] = useGlobals();
- const selectedTheme = globals.euiTheme;
+ const [{ euiTheme: selectedTheme }, updateGlobals] = useGlobals();
- if (!selectedTheme) {
- updateGlobals({ euiTheme: defaultTheme });
- }
+ const selectTheme = useCallback(
+ (themeId: string) => {
+ updateGlobals({ euiTheme: themeId });
+ },
+ [updateGlobals]
+ );
- function Menu({ onHide }: { onHide: () => void }) {
+ useEffect(() => {
+ if (!selectedTheme) {
+ selectTheme(defaultTheme);
+ }
+ }, [selectTheme, selectedTheme]);
+
+ return (
+ (
+
+ )}
+ >
+ {/* @ts-ignore Remove when @storybook has moved to @emotion v11 */}
+
+
+
+
+ );
+}
+
+const ThemeSwitcherTooltip = React.memo(
+ ({
+ onHide,
+ onChangeSelectedTheme,
+ selectedTheme,
+ }: {
+ onHide: () => void;
+ onChangeSelectedTheme: (themeId: string) => void;
+ selectedTheme: string;
+ }) => {
const links = [
{
id: 'v8.light',
@@ -38,8 +76,8 @@ export function ThemeSwitcher() {
(link): Link => ({
...link,
onClick: (_event, item) => {
- if (item.id !== selectedTheme) {
- updateGlobals({ euiTheme: item.id });
+ if (item.id != null && item.id !== selectedTheme) {
+ onChangeSelectedTheme(item.id);
}
onHide();
},
@@ -49,18 +87,4 @@ export function ThemeSwitcher() {
 return <TooltipLinkList links={links} />;
}
-
- return (
- }
- >
- {/* @ts-ignore Remove when @storybook has moved to @emotion v11 */}
-
-
-
-
- );
-}
+);
diff --git a/packages/kbn-telemetry-tools/BUILD.bazel b/packages/kbn-telemetry-tools/BUILD.bazel
index 1183de2586424..d2ea3a704f154 100644
--- a/packages/kbn-telemetry-tools/BUILD.bazel
+++ b/packages/kbn-telemetry-tools/BUILD.bazel
@@ -38,8 +38,9 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-utility-types",
+ "@npm//tslib",
"@npm//@types/glob",
"@npm//@types/jest",
"@npm//@types/listr",
diff --git a/packages/kbn-test/BUILD.bazel b/packages/kbn-test/BUILD.bazel
index c42c33483703e..eae0fe2cdf5dc 100644
--- a/packages/kbn-test/BUILD.bazel
+++ b/packages/kbn-test/BUILD.bazel
@@ -44,11 +44,13 @@ RUNTIME_DEPS = [
"@npm//axios",
"@npm//@babel/traverse",
"@npm//chance",
+ "@npm//dedent",
"@npm//del",
"@npm//enzyme",
"@npm//execa",
"@npm//exit-hook",
"@npm//form-data",
+ "@npm//getopts",
"@npm//globby",
"@npm//he",
"@npm//history",
@@ -59,6 +61,7 @@ RUNTIME_DEPS = [
"@npm//@jest/reporters",
"@npm//joi",
"@npm//mustache",
+ "@npm//normalize-path",
"@npm//parse-link-header",
"@npm//prettier",
"@npm//react-dom",
@@ -72,12 +75,17 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-i18n-react:npm_module_types",
+ "//packages/kbn-std",
"//packages/kbn-utils",
"@npm//@elastic/elasticsearch",
+ "@npm//axios",
+ "@npm//elastic-apm-node",
"@npm//del",
+ "@npm//exit-hook",
"@npm//form-data",
+ "@npm//getopts",
"@npm//jest",
"@npm//jest-cli",
"@npm//jest-snapshot",
@@ -85,6 +93,7 @@ TYPES_DEPS = [
"@npm//rxjs",
"@npm//xmlbuilder",
"@npm//@types/chance",
+ "@npm//@types/dedent",
"@npm//@types/enzyme",
"@npm//@types/he",
"@npm//@types/history",
@@ -92,6 +101,7 @@ TYPES_DEPS = [
"@npm//@types/joi",
"@npm//@types/lodash",
"@npm//@types/mustache",
+ "@npm//@types/normalize-path",
"@npm//@types/node",
"@npm//@types/parse-link-header",
"@npm//@types/prettier",
diff --git a/packages/kbn-test/jest-preset.js b/packages/kbn-test/jest-preset.js
index db64f070b37d9..e2607100babc5 100644
--- a/packages/kbn-test/jest-preset.js
+++ b/packages/kbn-test/jest-preset.js
@@ -28,6 +28,7 @@ module.exports = {
moduleNameMapper: {
'@elastic/eui/lib/(.*)?': '/node_modules/@elastic/eui/test-env/$1',
'@elastic/eui$': '/node_modules/@elastic/eui/test-env',
+ 'elastic-apm-node': '/node_modules/@kbn/test/target_node/jest/mocks/apm_agent_mock.js',
'\\.module.(css|scss)$':
'/node_modules/@kbn/test/target_node/jest/mocks/css_module_mock.js',
'\\.(css|less|scss)$': '/node_modules/@kbn/test/target_node/jest/mocks/style_mock.js',
diff --git a/packages/kbn-test/src/es/es_test_config.ts b/packages/kbn-test/src/es/es_test_config.ts
index db5d705710a75..70000c8068e9f 100644
--- a/packages/kbn-test/src/es/es_test_config.ts
+++ b/packages/kbn-test/src/es/es_test_config.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { kibanaPackageJson as pkg } from '@kbn/dev-utils';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
import Url from 'url';
import { adminTestUser } from '../kbn';
diff --git a/packages/kbn-test/src/failed_tests_reporter/buildkite_metadata.ts b/packages/kbn-test/src/failed_tests_reporter/buildkite_metadata.ts
new file mode 100644
index 0000000000000..d63f0166390cb
--- /dev/null
+++ b/packages/kbn-test/src/failed_tests_reporter/buildkite_metadata.ts
@@ -0,0 +1,38 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export interface BuildkiteMetadata {
+ buildId?: string;
+ jobId?: string;
+ url?: string;
+ jobName?: string;
+ jobUrl?: string;
+}
+
+export function getBuildkiteMetadata(): BuildkiteMetadata {
+ // Buildkite steps that use `parallelism` need a numerical suffix added to identify them
+ // We should also increment the number by one, since it's 0-based
+ const jobNumberSuffix = process.env.BUILDKITE_PARALLEL_JOB
+ ? ` #${parseInt(process.env.BUILDKITE_PARALLEL_JOB, 10) + 1}`
+ : '';
+
+ const buildUrl = process.env.BUILDKITE_BUILD_URL;
+ const jobUrl = process.env.BUILDKITE_JOB_ID
+ ? `${buildUrl}#${process.env.BUILDKITE_JOB_ID}`
+ : undefined;
+
+ return {
+ buildId: process.env.BUILDKITE_BUILD_ID,
+ jobId: process.env.BUILDKITE_JOB_ID,
+ url: buildUrl,
+ jobUrl,
+ jobName: process.env.BUILDKITE_LABEL
+ ? `${process.env.BUILDKITE_LABEL}${jobNumberSuffix}`
+ : undefined,
+ };
+}
diff --git a/packages/kbn-test/src/failed_tests_reporter/github_api.ts b/packages/kbn-test/src/failed_tests_reporter/github_api.ts
index adaae11b7aa16..bb7570225a013 100644
--- a/packages/kbn-test/src/failed_tests_reporter/github_api.ts
+++ b/packages/kbn-test/src/failed_tests_reporter/github_api.ts
@@ -42,6 +42,7 @@ export class GithubApi {
private readonly token: string | undefined;
private readonly dryRun: boolean;
private readonly x: AxiosInstance;
+ private requestCount: number = 0;
/**
* Create a GithubApi helper object, if token is undefined requests won't be
@@ -68,6 +69,10 @@ export class GithubApi {
});
}
+ getRequestCount() {
+ return this.requestCount;
+ }
+
private failedTestIssuesPageCache: {
pages: GithubIssue[][];
nextRequest: RequestOptions | undefined;
@@ -191,53 +196,50 @@ export class GithubApi {
}> {
const executeRequest = !this.dryRun || options.safeForDryRun;
const maxAttempts = options.maxAttempts || 5;
- const attempt = options.attempt || 1;
-
- this.log.verbose('Github API', executeRequest ? 'Request' : 'Dry Run', options);
-
- if (!executeRequest) {
- return {
- status: 200,
- statusText: 'OK',
- headers: {},
- data: dryRunResponse,
- };
- }
- try {
- return await this.x.request(options);
- } catch (error) {
- const unableToReachGithub = isAxiosRequestError(error);
- const githubApiFailed = isAxiosResponseError(error) && error.response.status >= 500;
- const errorResponseLog =
- isAxiosResponseError(error) &&
- `[${error.config.method} ${error.config.url}] ${error.response.status} ${error.response.statusText} Error`;
+ let attempt = 0;
+ while (true) {
+ attempt += 1;
+ this.log.verbose('Github API', executeRequest ? 'Request' : 'Dry Run', options);
+
+ if (!executeRequest) {
+ return {
+ status: 200,
+ statusText: 'OK',
+ headers: {},
+ data: dryRunResponse,
+ };
+ }
- if ((unableToReachGithub || githubApiFailed) && attempt < maxAttempts) {
- const waitMs = 1000 * attempt;
+ try {
+ this.requestCount += 1;
+ return await this.x.request(options);
+ } catch (error) {
+ const unableToReachGithub = isAxiosRequestError(error);
+ const githubApiFailed = isAxiosResponseError(error) && error.response.status >= 500;
+ const errorResponseLog =
+ isAxiosResponseError(error) &&
+ `[${error.config.method} ${error.config.url}] ${error.response.status} ${error.response.statusText} Error`;
+
+ if ((unableToReachGithub || githubApiFailed) && attempt < maxAttempts) {
+ const waitMs = 1000 * attempt;
+
+ if (errorResponseLog) {
+ this.log.error(`${errorResponseLog}: waiting ${waitMs}ms to retry`);
+ } else {
+ this.log.error(`Unable to reach github, waiting ${waitMs}ms to retry`);
+ }
+
+ await new Promise((resolve) => setTimeout(resolve, waitMs));
+ continue;
+ }
if (errorResponseLog) {
- this.log.error(`${errorResponseLog}: waiting ${waitMs}ms to retry`);
- } else {
- this.log.error(`Unable to reach github, waiting ${waitMs}ms to retry`);
+ throw new Error(`${errorResponseLog}: ${JSON.stringify(error.response.data)}`);
}
- await new Promise((resolve) => setTimeout(resolve, waitMs));
- return await this.request(
- {
- ...options,
- maxAttempts,
- attempt: attempt + 1,
- },
- dryRunResponse
- );
+ throw error;
}
-
- if (errorResponseLog) {
- throw new Error(`${errorResponseLog}: ${JSON.stringify(error.response.data)}`);
- }
-
- throw error;
}
}
}
diff --git a/packages/kbn-test/src/failed_tests_reporter/report_failures_to_file.ts b/packages/kbn-test/src/failed_tests_reporter/report_failures_to_file.ts
index e481da019945c..33dab240ec8b4 100644
--- a/packages/kbn-test/src/failed_tests_reporter/report_failures_to_file.ts
+++ b/packages/kbn-test/src/failed_tests_reporter/report_failures_to_file.ts
@@ -14,6 +14,7 @@ import { ToolingLog } from '@kbn/dev-utils';
import { REPO_ROOT } from '@kbn/utils';
import { escape } from 'he';
+import { BuildkiteMetadata } from './buildkite_metadata';
import { TestFailure } from './get_failures';
const findScreenshots = (dirPath: string, allScreenshots: string[] = []) => {
@@ -37,7 +38,11 @@ const findScreenshots = (dirPath: string, allScreenshots: string[] = []) => {
return allScreenshots;
};
-export function reportFailuresToFile(log: ToolingLog, failures: TestFailure[]) {
+export function reportFailuresToFile(
+ log: ToolingLog,
+ failures: TestFailure[],
+ bkMeta: BuildkiteMetadata
+) {
if (!failures?.length) {
return;
}
@@ -76,28 +81,15 @@ export function reportFailuresToFile(log: ToolingLog, failures: TestFailure[]) {
.flat()
.join('\n');
- // Buildkite steps that use `parallelism` need a numerical suffix added to identify them
- // We should also increment the number by one, since it's 0-based
- const jobNumberSuffix = process.env.BUILDKITE_PARALLEL_JOB
- ? ` #${parseInt(process.env.BUILDKITE_PARALLEL_JOB, 10) + 1}`
- : '';
-
- const buildUrl = process.env.BUILDKITE_BUILD_URL || '';
- const jobUrl = process.env.BUILDKITE_JOB_ID
- ? `${buildUrl}#${process.env.BUILDKITE_JOB_ID}`
- : '';
-
const failureJSON = JSON.stringify(
{
...failure,
hash,
- buildId: process.env.BUJILDKITE_BUILD_ID || '',
- jobId: process.env.BUILDKITE_JOB_ID || '',
- url: buildUrl,
- jobUrl,
- jobName: process.env.BUILDKITE_LABEL
- ? `${process.env.BUILDKITE_LABEL}${jobNumberSuffix}`
- : '',
+ buildId: bkMeta.buildId,
+ jobId: bkMeta.jobId,
+ url: bkMeta.url,
+ jobUrl: bkMeta.jobUrl,
+ jobName: bkMeta.jobName,
},
null,
2
@@ -149,11 +141,11 @@ export function reportFailuresToFile(log: ToolingLog, failures: TestFailure[]) {
${
- jobUrl
+ bkMeta.jobUrl
? `
Buildkite Job
- ${escape(jobUrl)}
+ ${escape(bkMeta.jobUrl)}
`
: ''
diff --git a/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts b/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts
index 193bc668ce003..6ab135a6afa7e 100644
--- a/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts
+++ b/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts
@@ -9,7 +9,7 @@
import Path from 'path';
import { REPO_ROOT } from '@kbn/utils';
-import { run, createFailError, createFlagError } from '@kbn/dev-utils';
+import { run, createFailError, createFlagError, CiStatsReporter } from '@kbn/dev-utils';
import globby from 'globby';
import normalize from 'normalize-path';
@@ -22,6 +22,7 @@ import { addMessagesToReport } from './add_messages_to_report';
import { getReportMessageIter } from './report_metadata';
import { reportFailuresToEs } from './report_failures_to_es';
import { reportFailuresToFile } from './report_failures_to_file';
+import { getBuildkiteMetadata } from './buildkite_metadata';
const DEFAULT_PATTERNS = [Path.resolve(REPO_ROOT, 'target/junit/**/*.xml')];
@@ -71,108 +72,129 @@ export function runFailedTestsReporterCli() {
dryRun: !updateGithub,
});
- const buildUrl = flags['build-url'] || (updateGithub ? '' : 'http://buildUrl');
- if (typeof buildUrl !== 'string' || !buildUrl) {
- throw createFlagError('Missing --build-url or process.env.BUILD_URL');
- }
+ const bkMeta = getBuildkiteMetadata();
- const patterns = (flags._.length ? flags._ : DEFAULT_PATTERNS).map((p) =>
- normalize(Path.resolve(p))
- );
- log.info('Searching for reports at', patterns);
- const reportPaths = await globby(patterns, {
- absolute: true,
- });
+ try {
+ const buildUrl = flags['build-url'] || (updateGithub ? '' : 'http://buildUrl');
+ if (typeof buildUrl !== 'string' || !buildUrl) {
+ throw createFlagError('Missing --build-url or process.env.BUILD_URL');
+ }
- if (!reportPaths.length) {
- throw createFailError(`Unable to find any junit reports with patterns [${patterns}]`);
- }
+ const patterns = (flags._.length ? flags._ : DEFAULT_PATTERNS).map((p) =>
+ normalize(Path.resolve(p))
+ );
+ log.info('Searching for reports at', patterns);
+ const reportPaths = await globby(patterns, {
+ absolute: true,
+ });
- log.info('found', reportPaths.length, 'junit reports', reportPaths);
- const newlyCreatedIssues: Array<{
- failure: TestFailure;
- newIssue: GithubIssueMini;
- }> = [];
+ if (!reportPaths.length) {
+ throw createFailError(`Unable to find any junit reports with patterns [${patterns}]`);
+ }
- for (const reportPath of reportPaths) {
- const report = await readTestReport(reportPath);
- const messages = Array.from(getReportMessageIter(report));
- const failures = await getFailures(report);
+ log.info('found', reportPaths.length, 'junit reports', reportPaths);
+ const newlyCreatedIssues: Array<{
+ failure: TestFailure;
+ newIssue: GithubIssueMini;
+ }> = [];
- if (indexInEs) {
- await reportFailuresToEs(log, failures);
- }
+ for (const reportPath of reportPaths) {
+ const report = await readTestReport(reportPath);
+ const messages = Array.from(getReportMessageIter(report));
+ const failures = await getFailures(report);
- for (const failure of failures) {
- const pushMessage = (msg: string) => {
- messages.push({
- classname: failure.classname,
- name: failure.name,
- message: msg,
- });
- };
-
- if (failure.likelyIrrelevant) {
- pushMessage(
- 'Failure is likely irrelevant' +
- (updateGithub ? ', so an issue was not created or updated' : '')
- );
- continue;
+ if (indexInEs) {
+ await reportFailuresToEs(log, failures);
}
- let existingIssue: GithubIssueMini | undefined = await githubApi.findFailedTestIssue(
- (i) =>
- getIssueMetadata(i.body, 'test.class') === failure.classname &&
- getIssueMetadata(i.body, 'test.name') === failure.name
- );
+ for (const failure of failures) {
+ const pushMessage = (msg: string) => {
+ messages.push({
+ classname: failure.classname,
+ name: failure.name,
+ message: msg,
+ });
+ };
+
+ if (failure.likelyIrrelevant) {
+ pushMessage(
+ 'Failure is likely irrelevant' +
+ (updateGithub ? ', so an issue was not created or updated' : '')
+ );
+ continue;
+ }
- if (!existingIssue) {
- const newlyCreated = newlyCreatedIssues.find(
- ({ failure: f }) => f.classname === failure.classname && f.name === failure.name
- );
+ let existingIssue: GithubIssueMini | undefined = updateGithub
+ ? await githubApi.findFailedTestIssue(
+ (i) =>
+ getIssueMetadata(i.body, 'test.class') === failure.classname &&
+ getIssueMetadata(i.body, 'test.name') === failure.name
+ )
+ : undefined;
+
+ if (!existingIssue) {
+ const newlyCreated = newlyCreatedIssues.find(
+ ({ failure: f }) => f.classname === failure.classname && f.name === failure.name
+ );
+
+ if (newlyCreated) {
+ existingIssue = newlyCreated.newIssue;
+ }
+ }
- if (newlyCreated) {
- existingIssue = newlyCreated.newIssue;
+ if (existingIssue) {
+ const newFailureCount = await updateFailureIssue(
+ buildUrl,
+ existingIssue,
+ githubApi,
+ branch
+ );
+ const url = existingIssue.html_url;
+ failure.githubIssue = url;
+ failure.failureCount = updateGithub ? newFailureCount : newFailureCount - 1;
+ pushMessage(
+ `Test has failed ${newFailureCount - 1} times on tracked branches: ${url}`
+ );
+ if (updateGithub) {
+ pushMessage(`Updated existing issue: ${url} (fail count: ${newFailureCount})`);
+ }
+ continue;
}
- }
- if (existingIssue) {
- const newFailureCount = await updateFailureIssue(
- buildUrl,
- existingIssue,
- githubApi,
- branch
- );
- const url = existingIssue.html_url;
- failure.githubIssue = url;
- failure.failureCount = updateGithub ? newFailureCount : newFailureCount - 1;
- pushMessage(`Test has failed ${newFailureCount - 1} times on tracked branches: ${url}`);
+ const newIssue = await createFailureIssue(buildUrl, failure, githubApi, branch);
+ pushMessage('Test has not failed recently on tracked branches');
if (updateGithub) {
- pushMessage(`Updated existing issue: ${url} (fail count: ${newFailureCount})`);
+ pushMessage(`Created new issue: ${newIssue.html_url}`);
+ failure.githubIssue = newIssue.html_url;
}
- continue;
- }
-
- const newIssue = await createFailureIssue(buildUrl, failure, githubApi, branch);
- pushMessage('Test has not failed recently on tracked branches');
- if (updateGithub) {
- pushMessage(`Created new issue: ${newIssue.html_url}`);
- failure.githubIssue = newIssue.html_url;
+ newlyCreatedIssues.push({ failure, newIssue });
+ failure.failureCount = updateGithub ? 1 : 0;
}
- newlyCreatedIssues.push({ failure, newIssue });
- failure.failureCount = updateGithub ? 1 : 0;
- }
- // mutates report to include messages and writes updated report to disk
- await addMessagesToReport({
- report,
- messages,
- log,
- reportPath,
- dryRun: !flags['report-update'],
- });
+ // mutates report to include messages and writes updated report to disk
+ await addMessagesToReport({
+ report,
+ messages,
+ log,
+ reportPath,
+ dryRun: !flags['report-update'],
+ });
- reportFailuresToFile(log, failures);
+ reportFailuresToFile(log, failures, bkMeta);
+ }
+ } finally {
+ await CiStatsReporter.fromEnv(log).metrics([
+ {
+ group: 'github api request count',
+ id: `failed test reporter`,
+ value: githubApi.getRequestCount(),
+ meta: Object.fromEntries(
+ Object.entries(bkMeta).map(
+ ([k, v]) => [`buildkite${k[0].toUpperCase()}${k.slice(1)}`, v] as const
+ )
+ ),
+ },
+ ]);
}
},
{
diff --git a/packages/kbn-test/src/functional_test_runner/lib/mocha/validate_ci_group_tags.js b/packages/kbn-test/src/functional_test_runner/lib/mocha/validate_ci_group_tags.js
index 3446c5be5d4a7..4f798839d7231 100644
--- a/packages/kbn-test/src/functional_test_runner/lib/mocha/validate_ci_group_tags.js
+++ b/packages/kbn-test/src/functional_test_runner/lib/mocha/validate_ci_group_tags.js
@@ -8,7 +8,7 @@
import Path from 'path';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
/**
* Traverse the suites configured and ensure that each suite has no more than one ciGroup assigned
diff --git a/packages/kbn-test/src/functional_test_runner/lib/suite_tracker.test.ts b/packages/kbn-test/src/functional_test_runner/lib/suite_tracker.test.ts
index e87f316a100a7..53ce4c74c1388 100644
--- a/packages/kbn-test/src/functional_test_runner/lib/suite_tracker.test.ts
+++ b/packages/kbn-test/src/functional_test_runner/lib/suite_tracker.test.ts
@@ -14,7 +14,7 @@ jest.mock('@kbn/utils', () => {
return { REPO_ROOT: '/dev/null/root' };
});
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Lifecycle } from './lifecycle';
import { SuiteTracker } from './suite_tracker';
import { Suite } from '../fake_mocha_types';
diff --git a/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js b/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js
index 03947f7e267ba..63d2b56350ba1 100644
--- a/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js
+++ b/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js
@@ -9,7 +9,7 @@
const Fs = require('fs');
const Path = require('path');
-const { REPO_ROOT: REPO_ROOT_FOLLOWING_SYMLINKS } = require('@kbn/dev-utils');
+const { REPO_ROOT: REPO_ROOT_FOLLOWING_SYMLINKS } = require('@kbn/utils');
const BASE_REPO_ROOT = Path.resolve(
Fs.realpathSync(Path.resolve(REPO_ROOT_FOLLOWING_SYMLINKS, 'package.json')),
'..'
diff --git a/packages/kbn-test/src/functional_tests/tasks.ts b/packages/kbn-test/src/functional_tests/tasks.ts
index 6dde114d3a98e..6a6c7edb98c79 100644
--- a/packages/kbn-test/src/functional_tests/tasks.ts
+++ b/packages/kbn-test/src/functional_tests/tasks.ts
@@ -9,7 +9,8 @@
import { relative } from 'path';
import * as Rx from 'rxjs';
import { startWith, switchMap, take } from 'rxjs/operators';
-import { withProcRunner, ToolingLog, REPO_ROOT, getTimeReporter } from '@kbn/dev-utils';
+import { withProcRunner, ToolingLog, getTimeReporter } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import dedent from 'dedent';
import {
diff --git a/packages/kbn-test/src/jest/mocks/apm_agent_mock.ts b/packages/kbn-test/src/jest/mocks/apm_agent_mock.ts
new file mode 100644
index 0000000000000..1615f710504ad
--- /dev/null
+++ b/packages/kbn-test/src/jest/mocks/apm_agent_mock.ts
@@ -0,0 +1,63 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import type { Agent } from 'elastic-apm-node';
+
+/**
+ * `elastic-apm-node` patches the runtime at import time
+ * causing memory leak with jest module sandbox, so it
+ * needs to be mocked for tests
+ */
+const agent: jest.Mocked<Agent> = {
+ start: jest.fn().mockImplementation(() => agent),
+ isStarted: jest.fn().mockReturnValue(false),
+ getServiceName: jest.fn().mockReturnValue('mock-service'),
+ setFramework: jest.fn(),
+ addPatch: jest.fn(),
+ removePatch: jest.fn(),
+ clearPatches: jest.fn(),
+ lambda: jest.fn(),
+ handleUncaughtExceptions: jest.fn(),
+ captureError: jest.fn(),
+ currentTraceparent: null,
+ currentTraceIds: {},
+ startTransaction: jest.fn().mockReturnValue(null),
+ setTransactionName: jest.fn(),
+ endTransaction: jest.fn(),
+ currentTransaction: null,
+ startSpan: jest.fn(),
+ currentSpan: null,
+ setLabel: jest.fn().mockReturnValue(false),
+ addLabels: jest.fn().mockReturnValue(false),
+ setUserContext: jest.fn(),
+ setCustomContext: jest.fn(),
+ addFilter: jest.fn(),
+ addErrorFilter: jest.fn(),
+ addSpanFilter: jest.fn(),
+ addTransactionFilter: jest.fn(),
+ addMetadataFilter: jest.fn(),
+ flush: jest.fn(),
+ destroy: jest.fn(),
+ registerMetric: jest.fn(),
+ setTransactionOutcome: jest.fn(),
+ setSpanOutcome: jest.fn(),
+ middleware: {
+ connect: jest.fn().mockReturnValue(jest.fn()),
+ },
+ logger: {
+ fatal: jest.fn(),
+ error: jest.fn(),
+ warn: jest.fn(),
+ info: jest.fn(),
+ debug: jest.fn(),
+ trace: jest.fn(),
+ },
+};
+
+// eslint-disable-next-line import/no-default-export
+export default agent;
diff --git a/packages/kbn-test/src/kbn/users.ts b/packages/kbn-test/src/kbn/users.ts
index 230354089dcac..88480fde74ddc 100644
--- a/packages/kbn-test/src/kbn/users.ts
+++ b/packages/kbn-test/src/kbn/users.ts
@@ -14,7 +14,7 @@ export const kibanaTestUser = {
};
export const kibanaServerTestUser = {
- username: env.TEST_KIBANA_SERVER_USER || 'kibana',
+ username: env.TEST_KIBANA_SERVER_USER || 'kibana_system',
password: env.TEST_KIBANA_SERVER_PASS || 'changeme',
};
diff --git a/packages/kbn-test/src/kbn_client/kbn_client_import_export.ts b/packages/kbn-test/src/kbn_client/kbn_client_import_export.ts
index 4adae7d1cd031..6da34228bbe7f 100644
--- a/packages/kbn-test/src/kbn_client/kbn_client_import_export.ts
+++ b/packages/kbn-test/src/kbn_client/kbn_client_import_export.ts
@@ -12,7 +12,8 @@ import { existsSync } from 'fs';
import Path from 'path';
import FormData from 'form-data';
-import { ToolingLog, isAxiosResponseError, createFailError, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog, isAxiosResponseError, createFailError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { KbnClientRequester, uriencode, ReqOptions } from './kbn_client_requester';
import { KbnClientSavedObjects } from './kbn_client_saved_objects';
diff --git a/packages/kbn-typed-react-router-config/BUILD.bazel b/packages/kbn-typed-react-router-config/BUILD.bazel
index b347915ae3310..d759948a6c576 100644
--- a/packages/kbn-typed-react-router-config/BUILD.bazel
+++ b/packages/kbn-typed-react-router-config/BUILD.bazel
@@ -41,10 +41,10 @@ TYPES_DEPS = [
"@npm//query-string",
"@npm//utility-types",
"@npm//@types/jest",
- "@npm//@types/history",
"@npm//@types/node",
"@npm//@types/react-router-config",
"@npm//@types/react-router-dom",
+ "@npm//@types/history",
]
jsts_transpiler(
diff --git a/packages/kbn-typed-react-router-config/src/create_router.test.tsx b/packages/kbn-typed-react-router-config/src/create_router.test.tsx
index e82fcf791804e..ac337f8bb5b87 100644
--- a/packages/kbn-typed-react-router-config/src/create_router.test.tsx
+++ b/packages/kbn-typed-react-router-config/src/create_router.test.tsx
@@ -267,7 +267,6 @@ describe('createRouter', () => {
const matches = router.matchRoutes('/', history.location);
- // @ts-expect-error 4.3.5 upgrade - router doesn't seem able to merge properly when two routes match
expect(matches[1]?.match.params).toEqual({
query: {
rangeFrom: 'now-30m',
@@ -286,7 +285,6 @@ describe('createRouter', () => {
expect(matchedRoutes.length).toEqual(4);
- // @ts-expect-error 4.3.5 upgrade - router doesn't seem able to merge properly when two routes match
expect(matchedRoutes[matchedRoutes.length - 1].match).toEqual({
isExact: true,
params: {
diff --git a/packages/kbn-typed-react-router-config/src/create_router.ts b/packages/kbn-typed-react-router-config/src/create_router.ts
index 186f949d9c8e8..89ff4fc6b0c6c 100644
--- a/packages/kbn-typed-react-router-config/src/create_router.ts
+++ b/packages/kbn-typed-react-router-config/src/create_router.ts
@@ -23,7 +23,7 @@ function toReactRouterPath(path: string) {
return path.replace(/(?:{([^\/]+)})/g, ':$1');
}
-export function createRouter(routes: TRoute[]): Router {
+export function createRouter(routes: TRoutes): Router {
const routesByReactRouterConfig = new Map();
const reactRouterConfigsByRoute = new Map();
@@ -181,8 +181,10 @@ export function createRouter(routes: TRoute[]): Router {
+ return link(path, ...args);
+ },
getParams: (...args: any[]) => {
const matches = matchRoutes(...args);
return matches.length
@@ -195,13 +197,11 @@ export function createRouter(routes: TRoute[]): Router {
return matchRoutes(...args) as any;
},
- getRoutePath: (route: Route) => {
+ getRoutePath: (route) => {
return reactRouterConfigsByRoute.get(route)!.path as string;
},
getRoutesToMatch: (path: string) => {
- return getRoutesToMatch(path) as unknown as FlattenRoutesOf;
+ return getRoutesToMatch(path) as unknown as FlattenRoutesOf;
},
};
-
- return router;
}
diff --git a/packages/kbn-typed-react-router-config/src/types/index.ts b/packages/kbn-typed-react-router-config/src/types/index.ts
index 3c09b60054a0c..f15fd99a02a87 100644
--- a/packages/kbn-typed-react-router-config/src/types/index.ts
+++ b/packages/kbn-typed-react-router-config/src/types/index.ts
@@ -13,97 +13,13 @@ import { RequiredKeys, ValuesType } from 'utility-types';
// import { unconst } from '../unconst';
import { NormalizePath } from './utils';
-type PathsOfRoute =
- | TRoute['path']
- | (TRoute extends { children: Route[] }
- ? AppendPath | PathsOf
- : never);
-
-export type PathsOf = TRoutes extends []
- ? never
- : TRoutes extends [Route]
- ? PathsOfRoute
- : TRoutes extends [Route, Route]
- ? PathsOfRoute | PathsOfRoute
- : TRoutes extends [Route, Route, Route]
- ? PathsOfRoute | PathsOfRoute | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : string;
+// type PathsOfRoute =
+// | TRoute['path']
+// | (TRoute extends { children: Route[] }
+// ? AppendPath | PathsOf
+// : never);
+
+export type PathsOf = keyof MapRoutes & string;
export interface RouteMatch {
route: TRoute;
@@ -115,7 +31,7 @@ export interface RouteMatch {
params: t.Type;
}
? t.TypeOf
- : AnyObj;
+ : {};
};
}
@@ -160,11 +76,10 @@ interface ReadonlyPlainRoute {
}
export type Route = PlainRoute | ReadonlyPlainRoute;
-type AnyObj = Record;
interface DefaultOutput {
- path: AnyObj;
- query: AnyObj;
+ path: {};
+ query: {};
}
type OutputOfRouteMatch = TRouteMatch extends {
@@ -191,21 +106,20 @@ type TypeOfRouteMatch = TRouteMatch extends {
route: { params: t.Type };
}
? t.TypeOf
- : AnyObj;
+ : {};
type TypeOfMatches = TRouteMatches extends [RouteMatch]
? TypeOfRouteMatch
: TRouteMatches extends [RouteMatch, ...infer TNextRouteMatches]
? TypeOfRouteMatch &
- (TNextRouteMatches extends RouteMatch[] ? TypeOfMatches : AnyObj)
- : AnyObj;
+ (TNextRouteMatches extends RouteMatch[] ? TypeOfMatches : {})
+ : {};
export type TypeOf<
TRoutes extends Route[],
TPath extends PathsOf,
TWithDefaultOutput extends boolean = true
-> = TypeOfMatches> &
- (TWithDefaultOutput extends true ? DefaultOutput : AnyObj);
+> = TypeOfMatches> & (TWithDefaultOutput extends true ? DefaultOutput : {});
export type TypeAsArgs = keyof TObject extends never
? []
@@ -278,7 +192,7 @@ type MapRoute = MaybeUnion<
>;
}
>
- : AnyObj
+ : {}
>;
type MapRoutes = TRoutes extends [Route]
@@ -343,12 +257,20 @@ type MapRoutes = TRoutes extends [Route]
MapRoute &
MapRoute &
MapRoute
- : AnyObj;
+ : {};
// const element = null as any;
// const routes = unconst([
// {
+// path: '/link-to/transaction/{transactionId}',
+// element,
+// },
+// {
+// path: '/link-to/trace/{traceId}',
+// element,
+// },
+// {
// path: '/',
// element,
// children: [
@@ -395,6 +317,10 @@ type MapRoutes = TRoutes extends [Route]
// element,
// },
// {
+// path: '/settings/agent-keys',
+// element,
+// },
+// {
// path: '/settings',
// element,
// },
@@ -432,11 +358,19 @@ type MapRoutes = TRoutes extends [Route]
// element,
// },
// {
+// path: '/services/:serviceName/transactions/view',
+// element,
+// },
+// {
+// path: '/services/:serviceName/dependencies',
+// element,
+// },
+// {
// path: '/services/:serviceName/errors',
// element,
// children: [
// {
-// path: '/:groupId',
+// path: '/services/:serviceName/errors/:groupId',
// element,
// params: t.type({
// path: t.type({
@@ -445,7 +379,7 @@ type MapRoutes = TRoutes extends [Route]
// }),
// },
// {
-// path: '/services/:serviceName',
+// path: '/services/:serviceName/errors',
// element,
// params: t.partial({
// query: t.partial({
@@ -459,15 +393,33 @@ type MapRoutes = TRoutes extends [Route]
// ],
// },
// {
-// path: '/services/:serviceName/foo',
+// path: '/services/:serviceName/metrics',
+// element,
+// },
+// {
+// path: '/services/:serviceName/nodes',
+// element,
+// children: [
+// {
+// path: '/services/{serviceName}/nodes/{serviceNodeName}/metrics',
+// element,
+// },
+// {
+// path: '/services/:serviceName/nodes',
+// element,
+// },
+// ],
+// },
+// {
+// path: '/services/:serviceName/service-map',
// element,
// },
// {
-// path: '/services/:serviceName/bar',
+// path: '/services/:serviceName/logs',
// element,
// },
// {
-// path: '/services/:serviceName/baz',
+// path: '/services/:serviceName/profiling',
// element,
// },
// {
@@ -499,6 +451,24 @@ type MapRoutes = TRoutes extends [Route]
// element,
// },
// {
+// path: '/backends',
+// element,
+// children: [
+// {
+// path: '/backends/{backendName}/overview',
+// element,
+// },
+// {
+// path: '/backends/overview',
+// element,
+// },
+// {
+// path: '/backends',
+// element,
+// },
+// ],
+// },
+// {
// path: '/',
// element,
// },
@@ -511,10 +481,11 @@ type MapRoutes = TRoutes extends [Route]
// type Routes = typeof routes;
// type Mapped = keyof MapRoutes;
+// type Paths = PathsOf;
// type Bar = ValuesType>['route']['path'];
// type Foo = OutputOf;
-// type Baz = OutputOf;
+// // type Baz = OutputOf;
// const { path }: Foo = {} as any;
@@ -522,4 +493,4 @@ type MapRoutes = TRoutes extends [Route]
// return {} as any;
// }
-// const params = _useApmParams('/*');
+// // const params = _useApmParams('/services/:serviceName/nodes/*');
diff --git a/src/cli/serve/integration_tests/invalid_config.test.ts b/src/cli/serve/integration_tests/invalid_config.test.ts
index 2de902582a548..ca051f37a816e 100644
--- a/src/cli/serve/integration_tests/invalid_config.test.ts
+++ b/src/cli/serve/integration_tests/invalid_config.test.ts
@@ -8,7 +8,7 @@
import { spawnSync } from 'child_process';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
const INVALID_CONFIG_PATH = require.resolve('./__fixtures__/invalid_config.yml');
diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts
index 24c085ef64de3..fed3aa3093166 100644
--- a/src/core/public/doc_links/doc_links_service.ts
+++ b/src/core/public/doc_links/doc_links_service.ts
@@ -113,6 +113,7 @@ export class DocLinksService {
usersAccess: `${ENTERPRISE_SEARCH_DOCS}users-access.html`,
},
workplaceSearch: {
+ apiKeys: `${WORKPLACE_SEARCH_DOCS}workplace-search-api-authentication.html`,
box: `${WORKPLACE_SEARCH_DOCS}workplace-search-box-connector.html`,
confluenceCloud: `${WORKPLACE_SEARCH_DOCS}workplace-search-confluence-cloud-connector.html`,
confluenceServer: `${WORKPLACE_SEARCH_DOCS}workplace-search-confluence-server-connector.html`,
@@ -485,6 +486,7 @@ export class DocLinksService {
hdfsRepo: `${PLUGIN_DOCS}repository-hdfs.html`,
s3Repo: `${PLUGIN_DOCS}repository-s3.html`,
snapshotRestoreRepos: `${PLUGIN_DOCS}repository.html`,
+ mapperSize: `${PLUGIN_DOCS}mapper-size-usage.html`,
},
snapshotRestore: {
guide: `${ELASTICSEARCH_DOCS}snapshot-restore.html`,
@@ -671,6 +673,7 @@ export interface DocLinksStart {
readonly usersAccess: string;
};
readonly workplaceSearch: {
+ readonly apiKeys: string;
readonly box: string;
readonly confluenceCloud: string;
readonly confluenceServer: string;
@@ -872,7 +875,14 @@ export interface DocLinksStart {
}>;
readonly watcher: Record;
readonly ccs: Record;
- readonly plugins: Record;
+ readonly plugins: {
+ azureRepo: string;
+ gcsRepo: string;
+ hdfsRepo: string;
+ s3Repo: string;
+ snapshotRestoreRepos: string;
+ mapperSize: string;
+ };
readonly snapshotRestore: Record;
readonly ingest: Record;
readonly fleet: Readonly<{
diff --git a/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap b/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap
index e93ef34c38025..1c394112a404c 100644
--- a/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap
+++ b/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap
@@ -98,6 +98,7 @@ exports[`#start() returns \`Context\` component 1`] = `
"euiDataGridToolbar.fullScreenButtonActive": "Exit full screen",
"euiDatePopoverButton.invalidTitle": [Function],
"euiDatePopoverButton.outdatedTitle": [Function],
+ "euiErrorBoundary.error": "Error",
"euiFieldPassword.maskPassword": "Mask password",
"euiFieldPassword.showPassword": "Show password as plain text. Note: this will visually expose your password on the screen.",
"euiFilePicker.clearSelectedFiles": "Clear selected files",
@@ -218,7 +219,7 @@ exports[`#start() returns \`Context\` component 1`] = `
"euiStyleSelector.labelExpanded": "Expanded density",
"euiStyleSelector.labelNormal": "Normal density",
"euiSuperDatePicker.showDatesButtonLabel": "Show dates",
- "euiSuperSelect.screenReaderAnnouncement": [Function],
+ "euiSuperSelect.screenReaderAnnouncement": "You are in a form selector and must select a single option. Use the up and down keys to navigate or escape to close.",
"euiSuperSelectControl.selectAnOption": [Function],
"euiSuperUpdateButton.cannotUpdateTooltip": "Cannot update",
"euiSuperUpdateButton.clickToApplyTooltip": "Click to apply",
diff --git a/src/core/public/i18n/i18n_eui_mapping.tsx b/src/core/public/i18n/i18n_eui_mapping.tsx
index 7c4d39fa2b11a..e3357d138e794 100644
--- a/src/core/public/i18n/i18n_eui_mapping.tsx
+++ b/src/core/public/i18n/i18n_eui_mapping.tsx
@@ -663,6 +663,10 @@ export const getEuiContextMapping = (): EuiTokensObject => {
defaultMessage: '+ {messagesLength} more',
values: { messagesLength },
}),
+ 'euiErrorBoundary.error': i18n.translate('core.euiErrorBoundary.error', {
+ defaultMessage: 'Error',
+ description: 'Error boundary for uncaught exceptions when rendering part of the application',
+ }),
'euiNotificationEventMessages.accordionAriaLabelButtonText': ({
messagesLength,
eventName,
@@ -1046,12 +1050,13 @@ export const getEuiContextMapping = (): EuiTokensObject => {
description: 'Displayed in a button that shows date picker',
}
),
- 'euiSuperSelect.screenReaderAnnouncement': ({ optionsCount }: EuiValues) =>
- i18n.translate('core.euiSuperSelect.screenReaderAnnouncement', {
+ 'euiSuperSelect.screenReaderAnnouncement': i18n.translate(
+ 'core.euiSuperSelect.screenReaderAnnouncement',
+ {
defaultMessage:
- 'You are in a form selector of {optionsCount} items and must select a single option. Use the up and down keys to navigate or escape to close.',
- values: { optionsCount },
- }),
+ 'You are in a form selector and must select a single option. Use the up and down keys to navigate or escape to close.',
+ }
+ ),
'euiSuperSelectControl.selectAnOption': ({ selectedValue }: EuiValues) =>
i18n.translate('core.euiSuperSelectControl.selectAnOption', {
defaultMessage: 'Select an option: {selectedValue}, is selected',
diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md
index 30225acb3dd8d..63e0898b5fb90 100644
--- a/src/core/public/public.api.md
+++ b/src/core/public/public.api.md
@@ -571,6 +571,7 @@ export interface DocLinksStart {
readonly usersAccess: string;
};
readonly workplaceSearch: {
+ readonly apiKeys: string;
readonly box: string;
readonly confluenceCloud: string;
readonly confluenceServer: string;
@@ -772,7 +773,14 @@ export interface DocLinksStart {
}>;
readonly watcher: Record;
readonly ccs: Record;
- readonly plugins: Record;
+ readonly plugins: {
+ azureRepo: string;
+ gcsRepo: string;
+ hdfsRepo: string;
+ s3Repo: string;
+ snapshotRestoreRepos: string;
+ mapperSize: string;
+ };
readonly snapshotRestore: Record;
readonly ingest: Record;
readonly fleet: Readonly<{
diff --git a/src/core/server/capabilities/integration_tests/capabilities_service.test.ts b/src/core/server/capabilities/integration_tests/capabilities_service.test.ts
index 2e80fbb9d20c0..c1f6ffb5add77 100644
--- a/src/core/server/capabilities/integration_tests/capabilities_service.test.ts
+++ b/src/core/server/capabilities/integration_tests/capabilities_service.test.ts
@@ -7,7 +7,7 @@
*/
import supertest from 'supertest';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { HttpService, InternalHttpServicePreboot, InternalHttpServiceSetup } from '../../http';
import { contextServiceMock } from '../../context/context_service.mock';
import { executionContextServiceMock } from '../../execution_context/execution_context_service.mock';
diff --git a/src/core/server/core_context.mock.ts b/src/core/server/core_context.mock.ts
index ddb87d31383c8..4d7b4e1ba5548 100644
--- a/src/core/server/core_context.mock.ts
+++ b/src/core/server/core_context.mock.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import type { DeeplyMockedKeys } from '@kbn/utility-types/jest';
import { CoreContext } from './core_context';
import { Env, IConfigService } from './config';
diff --git a/src/core/server/elasticsearch/client/configure_client.test.ts b/src/core/server/elasticsearch/client/configure_client.test.ts
index 7988e81045d17..f252993415afa 100644
--- a/src/core/server/elasticsearch/client/configure_client.test.ts
+++ b/src/core/server/elasticsearch/client/configure_client.test.ts
@@ -6,21 +6,16 @@
* Side Public License, v 1.
*/
-import { Buffer } from 'buffer';
-import { Readable } from 'stream';
-
-import { errors } from '@elastic/elasticsearch';
-import type {
- TransportRequestOptions,
- TransportRequestParams,
- DiagnosticResult,
- RequestBody,
-} from '@elastic/elasticsearch';
+jest.mock('./log_query_and_deprecation.ts', () => ({
+ __esModule: true,
+ instrumentEsQueryAndDeprecationLogger: jest.fn(),
+}));
import { parseClientOptionsMock, ClientMock } from './configure_client.test.mocks';
import { loggingSystemMock } from '../../logging/logging_system.mock';
import type { ElasticsearchClientConfig } from './client_config';
import { configureClient } from './configure_client';
+import { instrumentEsQueryAndDeprecationLogger } from './log_query_and_deprecation';
const createFakeConfig = (
parts: Partial = {}
@@ -36,40 +31,9 @@ const createFakeClient = () => {
const client = new actualEs.Client({
nodes: ['http://localhost'], // Enforcing `nodes` because it's mandatory
});
- jest.spyOn(client.diagnostic, 'on');
return client;
};
-const createApiResponse = ({
- body,
- statusCode = 200,
- headers = {},
- warnings = [],
- params,
- requestOptions = {},
-}: {
- body: T;
- statusCode?: number;
- headers?: Record;
- warnings?: string[];
- params?: TransportRequestParams;
- requestOptions?: TransportRequestOptions;
-}): DiagnosticResult => {
- return {
- body,
- statusCode,
- headers,
- warnings,
- meta: {
- body,
- request: {
- params: params!,
- options: requestOptions,
- } as any,
- } as any,
- };
-};
-
describe('configureClient', () => {
let logger: ReturnType;
let config: ElasticsearchClientConfig;
@@ -84,6 +48,7 @@ describe('configureClient', () => {
afterEach(() => {
parseClientOptionsMock.mockReset();
ClientMock.mockReset();
+ jest.clearAllMocks();
});
it('calls `parseClientOptions` with the correct parameters', () => {
@@ -113,366 +78,14 @@ describe('configureClient', () => {
expect(client).toBe(ClientMock.mock.results[0].value);
});
- it('listens to client on `response` events', () => {
+ it('calls instrumentEsQueryAndDeprecationLogger', () => {
const client = configureClient(config, { logger, type: 'test', scoped: false });
- expect(client.diagnostic.on).toHaveBeenCalledTimes(1);
- expect(client.diagnostic.on).toHaveBeenCalledWith('response', expect.any(Function));
- });
-
- describe('Client logging', () => {
- function createResponseWithBody(body?: RequestBody) {
- return createApiResponse({
- body: {},
- statusCode: 200,
- params: {
- method: 'GET',
- path: '/foo',
- querystring: { hello: 'dolly' },
- body,
- },
- });
- }
-
- describe('logs each query', () => {
- it('creates a query logger context based on the `type` parameter', () => {
- configureClient(createFakeConfig(), { logger, type: 'test123' });
- expect(logger.get).toHaveBeenCalledWith('query', 'test123');
- });
-
- it('when request body is an object', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createResponseWithBody({
- seq_no_primary_term: true,
- query: {
- term: { user: 'kimchy' },
- },
- });
-
- client.diagnostic.emit('response', null, response);
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?hello=dolly
- {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}}",
- undefined,
- ],
- ]
- `);
- });
-
- it('when request body is a string', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createResponseWithBody(
- JSON.stringify({
- seq_no_primary_term: true,
- query: {
- term: { user: 'kimchy' },
- },
- })
- );
-
- client.diagnostic.emit('response', null, response);
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?hello=dolly
- {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}}",
- undefined,
- ],
- ]
- `);
- });
-
- it('when request body is a buffer', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createResponseWithBody(
- Buffer.from(
- JSON.stringify({
- seq_no_primary_term: true,
- query: {
- term: { user: 'kimchy' },
- },
- })
- )
- );
-
- client.diagnostic.emit('response', null, response);
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?hello=dolly
- [buffer]",
- undefined,
- ],
- ]
- `);
- });
-
- it('when request body is a readable stream', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createResponseWithBody(
- Readable.from(
- JSON.stringify({
- seq_no_primary_term: true,
- query: {
- term: { user: 'kimchy' },
- },
- })
- )
- );
-
- client.diagnostic.emit('response', null, response);
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?hello=dolly
- [stream]",
- undefined,
- ],
- ]
- `);
- });
-
- it('when request body is not defined', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createResponseWithBody();
-
- client.diagnostic.emit('response', null, response);
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?hello=dolly",
- undefined,
- ],
- ]
- `);
- });
-
- it('properly encode queries', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createApiResponse({
- body: {},
- statusCode: 200,
- params: {
- method: 'GET',
- path: '/foo',
- querystring: { city: 'Münich' },
- },
- });
-
- client.diagnostic.emit('response', null, response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?city=M%C3%BCnich",
- undefined,
- ],
- ]
- `);
- });
-
- it('logs queries even in case of errors', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createApiResponse({
- statusCode: 500,
- body: {
- error: {
- type: 'internal server error',
- },
- },
- params: {
- method: 'GET',
- path: '/foo',
- querystring: { hello: 'dolly' },
- body: {
- seq_no_primary_term: true,
- query: {
- term: { user: 'kimchy' },
- },
- },
- },
- });
- client.diagnostic.emit('response', new errors.ResponseError(response), response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "500
- GET /foo?hello=dolly
- {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}} [internal server error]: internal server error",
- undefined,
- ],
- ]
- `);
- });
-
- it('logs debug when the client emits an @elastic/elasticsearch error', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createApiResponse({ body: {} });
- client.diagnostic.emit('response', new errors.TimeoutError('message', response), response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "[TimeoutError]: message",
- undefined,
- ],
- ]
- `);
- });
-
- it('logs debug when the client emits an ResponseError returned by elasticsearch', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createApiResponse({
- statusCode: 400,
- headers: {},
- params: {
- method: 'GET',
- path: '/_path',
- querystring: { hello: 'dolly' },
- },
- body: {
- error: {
- type: 'illegal_argument_exception',
- reason: 'request [/_path] contains unrecognized parameter: [name]',
- },
- },
- });
- client.diagnostic.emit('response', new errors.ResponseError(response), response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "400
- GET /_path?hello=dolly [illegal_argument_exception]: request [/_path] contains unrecognized parameter: [name]",
- undefined,
- ],
- ]
- `);
- });
-
- it('logs default error info when the error response body is empty', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- let response: DiagnosticResult = createApiResponse({
- statusCode: 400,
- headers: {},
- params: {
- method: 'GET',
- path: '/_path',
- },
- body: {
- error: {},
- },
- });
- client.diagnostic.emit('response', new errors.ResponseError(response), response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "400
- GET /_path [undefined]: {\\"error\\":{}}",
- undefined,
- ],
- ]
- `);
-
- logger.debug.mockClear();
-
- response = createApiResponse({
- statusCode: 400,
- headers: {},
- params: {
- method: 'GET',
- path: '/_path',
- },
- body: undefined,
- });
- client.diagnostic.emit('response', new errors.ResponseError(response), response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "400
- GET /_path [undefined]: Response Error",
- undefined,
- ],
- ]
- `);
- });
-
- it('adds meta information to logs', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- let response = createApiResponse({
- statusCode: 400,
- headers: {},
- params: {
- method: 'GET',
- path: '/_path',
- },
- requestOptions: {
- opaqueId: 'opaque-id',
- },
- body: {
- error: {},
- },
- });
- client.diagnostic.emit('response', null, response);
-
- expect(loggingSystemMock.collect(logger).debug[0][1]).toMatchInlineSnapshot(`
- Object {
- "http": Object {
- "request": Object {
- "id": "opaque-id",
- },
- },
- }
- `);
-
- logger.debug.mockClear();
-
- response = createApiResponse({
- statusCode: 400,
- headers: {},
- params: {
- method: 'GET',
- path: '/_path',
- },
- requestOptions: {
- opaqueId: 'opaque-id',
- },
- body: {} as any,
- });
- client.diagnostic.emit('response', new errors.ResponseError(response), response);
-
- expect(loggingSystemMock.collect(logger).debug[0][1]).toMatchInlineSnapshot(`
- Object {
- "http": Object {
- "request": Object {
- "id": "opaque-id",
- },
- },
- }
- `);
- });
+ expect(instrumentEsQueryAndDeprecationLogger).toHaveBeenCalledTimes(1);
+ expect(instrumentEsQueryAndDeprecationLogger).toHaveBeenCalledWith({
+ logger,
+ client,
+ type: 'test',
});
});
});
diff --git a/src/core/server/elasticsearch/client/configure_client.ts b/src/core/server/elasticsearch/client/configure_client.ts
index fc8a06660cc5e..e48a36fa4fe58 100644
--- a/src/core/server/elasticsearch/client/configure_client.ts
+++ b/src/core/server/elasticsearch/client/configure_client.ts
@@ -6,21 +6,17 @@
* Side Public License, v 1.
*/
-import { Buffer } from 'buffer';
-import { stringify } from 'querystring';
-import { Client, errors, Transport, HttpConnection } from '@elastic/elasticsearch';
+import { Client, Transport, HttpConnection } from '@elastic/elasticsearch';
import type { KibanaClient } from '@elastic/elasticsearch/lib/api/kibana';
import type {
TransportRequestParams,
TransportRequestOptions,
TransportResult,
- DiagnosticResult,
- RequestBody,
} from '@elastic/elasticsearch';
import { Logger } from '../../logging';
import { parseClientOptions, ElasticsearchClientConfig } from './client_config';
-import type { ElasticsearchErrorDetails } from './types';
+import { instrumentEsQueryAndDeprecationLogger } from './log_query_and_deprecation';
const noop = () => undefined;
@@ -61,91 +57,8 @@ export const configureClient = (
Transport: KibanaTransport,
Connection: HttpConnection,
});
- addLogging(client, logger.get('query', type));
- return client as KibanaClient;
-};
-
-const convertQueryString = (qs: string | Record<string, any> | undefined): string => {
- if (qs === undefined || typeof qs === 'string') {
- return qs ?? '';
- }
- return stringify(qs);
-};
-
-function ensureString(body: RequestBody): string {
- if (typeof body === 'string') return body;
- if (Buffer.isBuffer(body)) return '[buffer]';
- if ('readable' in body && body.readable && typeof body._read === 'function') return '[stream]';
- return JSON.stringify(body);
-}
-
-/**
- * Returns a debug message from an Elasticsearch error in the following format:
- * [error type] error reason
- */
-export function getErrorMessage(error: errors.ElasticsearchClientError): string {
- if (error instanceof errors.ResponseError) {
- const errorBody = error.meta.body as ElasticsearchErrorDetails;
- return `[${errorBody?.error?.type}]: ${errorBody?.error?.reason ?? error.message}`;
- }
- return `[${error.name}]: ${error.message}`;
-}
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type });
-/**
- * returns a string in format:
- *
- * status code
- * method URL
- * request body
- *
- * so it could be copy-pasted into the Dev console
- */
-function getResponseMessage(event: DiagnosticResult): string {
- const errorMeta = getRequestDebugMeta(event);
- const body = errorMeta.body ? `\n${errorMeta.body}` : '';
- return `${errorMeta.statusCode}\n${errorMeta.method} ${errorMeta.url}${body}`;
-}
-
-/**
- * Returns stringified debug information from an Elasticsearch request event
- * useful for logging in case of an unexpected failure.
- */
-export function getRequestDebugMeta(event: DiagnosticResult): {
- url: string;
- body: string;
- statusCode: number | null;
- method: string;
-} {
- const params = event.meta.request.params;
- // definition is wrong, `params.querystring` can be either a string or an object
- const querystring = convertQueryString(params.querystring);
- return {
- url: `${params.path}${querystring ? `?${querystring}` : ''}`,
- body: params.body ? `${ensureString(params.body)}` : '',
- method: params.method,
- statusCode: event.statusCode!,
- };
-}
-
-const addLogging = (client: Client, logger: Logger) => {
- client.diagnostic.on('response', (error, event) => {
- if (event) {
- const opaqueId = event.meta.request.options.opaqueId;
- const meta = opaqueId
- ? {
- http: { request: { id: event.meta.request.options.opaqueId } },
- }
- : undefined; // do not clutter logs if opaqueId is not present
- if (error) {
- if (error instanceof errors.ResponseError) {
- logger.debug(`${getResponseMessage(event)} ${getErrorMessage(error)}`, meta);
- } else {
- logger.debug(getErrorMessage(error), meta);
- }
- } else {
- logger.debug(getResponseMessage(event), meta);
- }
- }
- });
+ return client as KibanaClient;
};
diff --git a/src/core/server/elasticsearch/client/index.ts b/src/core/server/elasticsearch/client/index.ts
index 2cf5a0229a489..123c498f1ee21 100644
--- a/src/core/server/elasticsearch/client/index.ts
+++ b/src/core/server/elasticsearch/client/index.ts
@@ -21,5 +21,6 @@ export type { IScopedClusterClient } from './scoped_cluster_client';
export type { ElasticsearchClientConfig } from './client_config';
export { ClusterClient } from './cluster_client';
export type { IClusterClient, ICustomClusterClient } from './cluster_client';
-export { configureClient, getRequestDebugMeta, getErrorMessage } from './configure_client';
+export { configureClient } from './configure_client';
+export { getRequestDebugMeta, getErrorMessage } from './log_query_and_deprecation';
export { retryCallCluster, migrationRetryCallCluster } from './retry_call_cluster';
diff --git a/src/core/server/elasticsearch/client/log_query_and_deprecation.test.ts b/src/core/server/elasticsearch/client/log_query_and_deprecation.test.ts
new file mode 100644
index 0000000000000..30d5d8b87ed1c
--- /dev/null
+++ b/src/core/server/elasticsearch/client/log_query_and_deprecation.test.ts
@@ -0,0 +1,624 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Buffer } from 'buffer';
+import { Readable } from 'stream';
+
+import {
+ Client,
+ ConnectionRequestParams,
+ errors,
+ TransportRequestOptions,
+ TransportRequestParams,
+} from '@elastic/elasticsearch';
+import type { DiagnosticResult, RequestBody } from '@elastic/elasticsearch';
+
+import { parseClientOptionsMock, ClientMock } from './configure_client.test.mocks';
+import { loggingSystemMock } from '../../logging/logging_system.mock';
+import { instrumentEsQueryAndDeprecationLogger } from './log_query_and_deprecation';
+
+const createApiResponse = <T>({
+  body,
+  statusCode = 200,
+  headers = {},
+  warnings = null,
+  params,
+  requestOptions = {},
+}: {
+  body: T;
+  statusCode?: number;
+  headers?: Record<string, string>;
+  warnings?: string[] | null;
+  params?: TransportRequestParams | ConnectionRequestParams;
+  requestOptions?: TransportRequestOptions;
+}): DiagnosticResult<T> => {
+ return {
+ body,
+ statusCode,
+ headers,
+ warnings,
+ meta: {
+ body,
+ request: {
+ params: params!,
+ options: requestOptions,
+ } as any,
+ } as any,
+ };
+};
+
+const createFakeClient = () => {
+ const actualEs = jest.requireActual('@elastic/elasticsearch');
+ const client = new actualEs.Client({
+ nodes: ['http://localhost'], // Enforcing `nodes` because it's mandatory
+ });
+ jest.spyOn(client.diagnostic, 'on');
+ return client as Client;
+};
+
+describe('instrumentQueryAndDeprecationLogger', () => {
+  let logger: ReturnType<typeof loggingSystemMock.createLogger>;
+ const client = createFakeClient();
+
+ beforeEach(() => {
+ logger = loggingSystemMock.createLogger();
+ parseClientOptionsMock.mockReturnValue({});
+ ClientMock.mockImplementation(() => createFakeClient());
+ });
+
+ afterEach(() => {
+ parseClientOptionsMock.mockReset();
+ ClientMock.mockReset();
+ jest.clearAllMocks();
+ });
+
+ function createResponseWithBody(body?: RequestBody) {
+ return createApiResponse({
+ body: {},
+ statusCode: 200,
+ params: {
+ method: 'GET',
+ path: '/foo',
+ querystring: { hello: 'dolly' },
+ body,
+ },
+ });
+ }
+
+ it('creates a query logger context based on the `type` parameter', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test123' });
+ expect(logger.get).toHaveBeenCalledWith('query', 'test123');
+ });
+
+ describe('logs each query', () => {
+ it('when request body is an object', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createResponseWithBody({
+ seq_no_primary_term: true,
+ query: {
+ term: { user: 'kimchy' },
+ },
+ });
+
+ client.diagnostic.emit('response', null, response);
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?hello=dolly
+ {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}}",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('when request body is a string', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createResponseWithBody(
+ JSON.stringify({
+ seq_no_primary_term: true,
+ query: {
+ term: { user: 'kimchy' },
+ },
+ })
+ );
+
+ client.diagnostic.emit('response', null, response);
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?hello=dolly
+ {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}}",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('when request body is a buffer', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createResponseWithBody(
+ Buffer.from(
+ JSON.stringify({
+ seq_no_primary_term: true,
+ query: {
+ term: { user: 'kimchy' },
+ },
+ })
+ )
+ );
+
+ client.diagnostic.emit('response', null, response);
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?hello=dolly
+ [buffer]",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('when request body is a readable stream', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createResponseWithBody(
+ Readable.from(
+ JSON.stringify({
+ seq_no_primary_term: true,
+ query: {
+ term: { user: 'kimchy' },
+ },
+ })
+ )
+ );
+
+ client.diagnostic.emit('response', null, response);
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?hello=dolly
+ [stream]",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('when request body is not defined', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createResponseWithBody();
+
+ client.diagnostic.emit('response', null, response);
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?hello=dolly",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('properly encode queries', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ body: {},
+ statusCode: 200,
+ params: {
+ method: 'GET',
+ path: '/foo',
+ querystring: { city: 'Münich' },
+ },
+ });
+
+ client.diagnostic.emit('response', null, response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?city=M%C3%BCnich",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('logs queries even in case of errors', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 500,
+ body: {
+ error: {
+ type: 'internal server error',
+ },
+ },
+ params: {
+ method: 'GET',
+ path: '/foo',
+ querystring: { hello: 'dolly' },
+ body: {
+ seq_no_primary_term: true,
+ query: {
+ term: { user: 'kimchy' },
+ },
+ },
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "500
+ GET /foo?hello=dolly
+ {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}} [internal server error]: internal server error",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('logs debug when the client emits an @elastic/elasticsearch error', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({ body: {} });
+ client.diagnostic.emit('response', new errors.TimeoutError('message', response), response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "[TimeoutError]: message",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('logs debug when the client emits an ResponseError returned by elasticsearch', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 400,
+ headers: {},
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ },
+ body: {
+ error: {
+ type: 'illegal_argument_exception',
+ reason: 'request [/_path] contains unrecognized parameter: [name]',
+ },
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "400
+ GET /_path?hello=dolly [illegal_argument_exception]: request [/_path] contains unrecognized parameter: [name]",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('logs default error info when the error response body is empty', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ let response: DiagnosticResult = createApiResponse({
+ statusCode: 400,
+ headers: {},
+ params: {
+ method: 'GET',
+ path: '/_path',
+ },
+ body: {
+ error: {},
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "400
+ GET /_path [undefined]: {\\"error\\":{}}",
+ undefined,
+ ],
+ ]
+ `);
+
+ logger.debug.mockClear();
+
+ response = createApiResponse({
+ statusCode: 400,
+ headers: {},
+ params: {
+ method: 'GET',
+ path: '/_path',
+ },
+ body: undefined,
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "400
+ GET /_path [undefined]: Response Error",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('adds meta information to logs', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ let response = createApiResponse({
+ statusCode: 400,
+ headers: {},
+ params: {
+ method: 'GET',
+ path: '/_path',
+ },
+ requestOptions: {
+ opaqueId: 'opaque-id',
+ },
+ body: {
+ error: {},
+ },
+ });
+ client.diagnostic.emit('response', null, response);
+
+ expect(loggingSystemMock.collect(logger).debug[0][1]).toMatchInlineSnapshot(`
+ Object {
+ "http": Object {
+ "request": Object {
+ "id": "opaque-id",
+ },
+ },
+ }
+ `);
+
+ logger.debug.mockClear();
+
+ response = createApiResponse({
+ statusCode: 400,
+ headers: {},
+ params: {
+ method: 'GET',
+ path: '/_path',
+ },
+ requestOptions: {
+ opaqueId: 'opaque-id',
+ },
+ body: {} as any,
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).debug[0][1]).toMatchInlineSnapshot(`
+ Object {
+ "http": Object {
+ "request": Object {
+ "id": "opaque-id",
+ },
+ },
+ }
+ `);
+ });
+ });
+
+ describe('deprecation warnings from response headers', () => {
+ it('does not log when no deprecation warning header is returned', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 200,
+ warnings: null,
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ },
+ body: {
+ hits: [
+ {
+ _source: 'may the source be with you',
+ },
+ ],
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ // One debug log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug.length).toEqual(1);
+ expect(loggingSystemMock.collect(logger).info).toEqual([]);
+ });
+
+ it('does not log when warning header comes from a warn-agent that is not elasticsearch', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 200,
+ warnings: [
+ '299 nginx/2.3.1 "GET /_path is deprecated"',
+ '299 nginx/2.3.1 "GET hello query param is deprecated"',
+ ],
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ },
+ body: {
+ hits: [
+ {
+ _source: 'may the source be with you',
+ },
+ ],
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ // One debug log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug.length).toEqual(1);
+ expect(loggingSystemMock.collect(logger).info).toEqual([]);
+ });
+
+ it('logs error when the client receives an Elasticsearch error response for a deprecated request originating from a user', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 400,
+ warnings: ['299 Elasticsearch-8.1.0 "GET /_path is deprecated"'],
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ },
+ body: {
+ error: {
+ type: 'illegal_argument_exception',
+ reason: 'request [/_path] contains unrecognized parameter: [name]',
+ },
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).info).toEqual([]);
+      // Test debug[1] since there is one log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(
+ 'Elasticsearch deprecation: 299 Elasticsearch-8.1.0 "GET /_path is deprecated"'
+ );
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch('Origin:user');
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(/Stack trace:\n.*at/);
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(
+ /Query:\n.*400\n.*GET \/_path\?hello\=dolly \[illegal_argument_exception\]: request \[\/_path\] contains unrecognized parameter: \[name\]/
+ );
+ });
+
+ it('logs warning when the client receives an Elasticsearch error response for a deprecated request originating from kibana', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 400,
+ warnings: ['299 Elasticsearch-8.1.0 "GET /_path is deprecated"'],
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ // Set the request header to indicate to Elasticsearch that this is a request over which users have no control
+ headers: { 'x-elastic-product-origin': 'kibana' },
+ },
+ body: {
+ error: {
+ type: 'illegal_argument_exception',
+ reason: 'request [/_path] contains unrecognized parameter: [name]',
+ },
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ // One debug log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug.length).toEqual(1);
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(
+ 'Elasticsearch deprecation: 299 Elasticsearch-8.1.0 "GET /_path is deprecated"'
+ );
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch('Origin:kibana');
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(/Stack trace:\n.*at/);
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(
+ /Query:\n.*400\n.*GET \/_path\?hello\=dolly \[illegal_argument_exception\]: request \[\/_path\] contains unrecognized parameter: \[name\]/
+ );
+ });
+
+ it('logs error when the client receives an Elasticsearch success response for a deprecated request originating from a user', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 200,
+ warnings: ['299 Elasticsearch-8.1.0 "GET /_path is deprecated"'],
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ },
+ body: {
+ hits: [
+ {
+ _source: 'may the source be with you',
+ },
+ ],
+ },
+ });
+ client.diagnostic.emit('response', null, response);
+
+ expect(loggingSystemMock.collect(logger).info).toEqual([]);
+      // Test debug[1] since there is one log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(
+ 'Elasticsearch deprecation: 299 Elasticsearch-8.1.0 "GET /_path is deprecated"'
+ );
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch('Origin:user');
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(/Stack trace:\n.*at/);
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(
+ /Query:\n.*200\n.*GET \/_path\?hello\=dolly/
+ );
+ });
+
+ it('logs warning when the client receives an Elasticsearch success response for a deprecated request originating from kibana', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 200,
+ warnings: ['299 Elasticsearch-8.1.0 "GET /_path is deprecated"'],
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ // Set the request header to indicate to Elasticsearch that this is a request over which users have no control
+ headers: { 'x-elastic-product-origin': 'kibana' },
+ },
+ body: {
+ hits: [
+ {
+ _source: 'may the source be with you',
+ },
+ ],
+ },
+ });
+ client.diagnostic.emit('response', null, response);
+
+ // One debug log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug.length).toEqual(1);
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(
+ 'Elasticsearch deprecation: 299 Elasticsearch-8.1.0 "GET /_path is deprecated"'
+ );
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch('Origin:kibana');
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(/Stack trace:\n.*at/);
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(
+ /Query:\n.*200\n.*GET \/_path\?hello\=dolly/
+ );
+ });
+ });
+});
diff --git a/src/core/server/elasticsearch/client/log_query_and_deprecation.ts b/src/core/server/elasticsearch/client/log_query_and_deprecation.ts
new file mode 100644
index 0000000000000..fc5a0fa6e1111
--- /dev/null
+++ b/src/core/server/elasticsearch/client/log_query_and_deprecation.ts
@@ -0,0 +1,143 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Buffer } from 'buffer';
+import { stringify } from 'querystring';
+import { errors, DiagnosticResult, RequestBody, Client } from '@elastic/elasticsearch';
+import type { ElasticsearchErrorDetails } from './types';
+import { Logger } from '../../logging';
+
+const convertQueryString = (qs: string | Record<string, any> | undefined): string => {
+ if (qs === undefined || typeof qs === 'string') {
+ return qs ?? '';
+ }
+ return stringify(qs);
+};
+
+function ensureString(body: RequestBody): string {
+ if (typeof body === 'string') return body;
+ if (Buffer.isBuffer(body)) return '[buffer]';
+ if ('readable' in body && body.readable && typeof body._read === 'function') return '[stream]';
+ return JSON.stringify(body);
+}
+
+/**
+ * Returns a debug message from an Elasticsearch error in the following format:
+ * [error type] error reason
+ */
+export function getErrorMessage(error: errors.ElasticsearchClientError): string {
+ if (error instanceof errors.ResponseError) {
+ const errorBody = error.meta.body as ElasticsearchErrorDetails;
+ return `[${errorBody?.error?.type}]: ${errorBody?.error?.reason ?? error.message}`;
+ }
+ return `[${error.name}]: ${error.message}`;
+}
+
+/**
+ * returns a string in format:
+ *
+ * status code
+ * method URL
+ * request body
+ *
+ * so it could be copy-pasted into the Dev console
+ */
+function getResponseMessage(event: DiagnosticResult): string {
+ const errorMeta = getRequestDebugMeta(event);
+ const body = errorMeta.body ? `\n${errorMeta.body}` : '';
+ return `${errorMeta.statusCode}\n${errorMeta.method} ${errorMeta.url}${body}`;
+}
+
+/**
+ * Returns stringified debug information from an Elasticsearch request event
+ * useful for logging in case of an unexpected failure.
+ */
+export function getRequestDebugMeta(event: DiagnosticResult): {
+ url: string;
+ body: string;
+ statusCode: number | null;
+ method: string;
+} {
+ const params = event.meta.request.params;
+ // definition is wrong, `params.querystring` can be either a string or an object
+ const querystring = convertQueryString(params.querystring);
+ return {
+ url: `${params.path}${querystring ? `?${querystring}` : ''}`,
+ body: params.body ? `${ensureString(params.body)}` : '',
+ method: params.method,
+ statusCode: event.statusCode!,
+ };
+}
+
+/** HTTP Warning headers have the following syntax:
+ * <warn-code> <warn-agent> "<warn-text>" (where warn-code is a three digit number)
+ * This function tests if a warning comes from an Elasticsearch warn-agent
+ * */
+const isEsWarning = (warning: string) => /\d\d\d Elasticsearch-/.test(warning);
+
+export const instrumentEsQueryAndDeprecationLogger = ({
+ logger,
+ client,
+ type,
+}: {
+ logger: Logger;
+ client: Client;
+ type: string;
+}) => {
+ const queryLogger = logger.get('query', type);
+ const deprecationLogger = logger.get('deprecation');
+ client.diagnostic.on('response', (error, event) => {
+ if (event) {
+ const opaqueId = event.meta.request.options.opaqueId;
+ const meta = opaqueId
+ ? {
+ http: { request: { id: event.meta.request.options.opaqueId } },
+ }
+ : undefined; // do not clutter logs if opaqueId is not present
+ let queryMsg = '';
+ if (error) {
+ if (error instanceof errors.ResponseError) {
+ queryMsg = `${getResponseMessage(event)} ${getErrorMessage(error)}`;
+ } else {
+ queryMsg = getErrorMessage(error);
+ }
+ } else {
+ queryMsg = getResponseMessage(event);
+ }
+
+ queryLogger.debug(queryMsg, meta);
+
+ if (event.warnings && event.warnings.filter(isEsWarning).length > 0) {
+ // Plugins can explicitly mark requests as originating from a user by
+ // removing the `'x-elastic-product-origin': 'kibana'` header that's
+ // added by default. User requests will be shown to users in the
+ // upgrade assistant UI as an action item that has to be addressed
+ // before they upgrade.
+ // Kibana requests will be hidden from the upgrade assistant UI and are
+ // only logged to help developers maintain their plugins
+ const requestOrigin =
+ (event.meta.request.params.headers != null &&
+ (event.meta.request.params.headers[
+ 'x-elastic-product-origin'
+ ] as unknown as string)) === 'kibana'
+ ? 'kibana'
+ : 'user';
+
+      // Strip the first 5 stack trace lines as these are irrelevant to finding the call site
+
+ const stackTrace = new Error().stack?.split('\n').slice(5).join('\n');
+
+ const deprecationMsg = `Elasticsearch deprecation: ${event.warnings}\nOrigin:${requestOrigin}\nStack trace:\n${stackTrace}\nQuery:\n${queryMsg}`;
+ if (requestOrigin === 'kibana') {
+ deprecationLogger.info(deprecationMsg);
+ } else {
+ deprecationLogger.debug(deprecationMsg);
+ }
+ }
+ }
+ });
+};
diff --git a/src/core/server/elasticsearch/elasticsearch_service.test.ts b/src/core/server/elasticsearch/elasticsearch_service.test.ts
index 3b75d19b80a10..ce5672ad30519 100644
--- a/src/core/server/elasticsearch/elasticsearch_service.test.ts
+++ b/src/core/server/elasticsearch/elasticsearch_service.test.ts
@@ -21,7 +21,7 @@ import { MockClusterClient, isScriptingEnabledMock } from './elasticsearch_servi
import type { NodesVersionCompatibility } from './version_check/ensure_es_version';
import { BehaviorSubject } from 'rxjs';
import { first } from 'rxjs/operators';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Env } from '../config';
import { configServiceMock, getEnvOptions } from '../config/mocks';
import { CoreContext } from '../core_context';
diff --git a/src/core/server/http/cookie_session_storage.test.ts b/src/core/server/http/cookie_session_storage.test.ts
index ad05d37c81e99..8e2cd58733faf 100644
--- a/src/core/server/http/cookie_session_storage.test.ts
+++ b/src/core/server/http/cookie_session_storage.test.ts
@@ -8,7 +8,7 @@
import { parse as parseCookie } from 'tough-cookie';
import supertest from 'supertest';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { ByteSizeValue } from '@kbn/config-schema';
import { BehaviorSubject } from 'rxjs';
diff --git a/src/core/server/http/http_service.test.ts b/src/core/server/http/http_service.test.ts
index 4955d19668580..3a387cdfd5e35 100644
--- a/src/core/server/http/http_service.test.ts
+++ b/src/core/server/http/http_service.test.ts
@@ -10,7 +10,7 @@ import { mockHttpServer } from './http_service.test.mocks';
import { noop } from 'lodash';
import { BehaviorSubject } from 'rxjs';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { getEnvOptions } from '../config/mocks';
import { HttpService } from '.';
import { HttpConfigType, config } from './http_config';
diff --git a/src/core/server/http/test_utils.ts b/src/core/server/http/test_utils.ts
index 4e1a88e967f8f..8a8c545b365b3 100644
--- a/src/core/server/http/test_utils.ts
+++ b/src/core/server/http/test_utils.ts
@@ -8,7 +8,7 @@
import { BehaviorSubject } from 'rxjs';
import moment from 'moment';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { ByteSizeValue } from '@kbn/config-schema';
import { Env } from '../config';
import { HttpService } from './http_service';
diff --git a/src/core/server/metrics/logging/get_ops_metrics_log.test.ts b/src/core/server/metrics/logging/get_ops_metrics_log.test.ts
index cba188c94c74e..3fd3c4a7a24d6 100644
--- a/src/core/server/metrics/logging/get_ops_metrics_log.test.ts
+++ b/src/core/server/metrics/logging/get_ops_metrics_log.test.ts
@@ -42,6 +42,7 @@ const testMetrics = {
memory: { heap: { used_in_bytes: 100 } },
uptime_in_millis: 1500,
event_loop_delay: 50,
+ event_loop_delay_histogram: { percentiles: { '50': 50, '75': 75, '95': 95, '99': 99 } },
},
os: {
load: {
@@ -56,7 +57,7 @@ describe('getEcsOpsMetricsLog', () => {
it('provides correctly formatted message', () => {
const result = getEcsOpsMetricsLog(createMockOpsMetrics(testMetrics));
expect(result.message).toMatchInlineSnapshot(
- `"memory: 100.0B uptime: 0:00:01 load: [10.00,20.00,30.00] delay: 50.000"`
+ `"memory: 100.0B uptime: 0:00:01 load: [10.00,20.00,30.00] mean delay: 50.000 delay histogram: { 50: 50.000; 95: 95.000; 99: 99.000 }"`
);
});
@@ -70,6 +71,7 @@ describe('getEcsOpsMetricsLog', () => {
const missingMetrics = {
...baseMetrics,
process: {},
+ processes: [],
os: {},
} as unknown as OpsMetrics;
const logMeta = getEcsOpsMetricsLog(missingMetrics);
@@ -77,39 +79,41 @@ describe('getEcsOpsMetricsLog', () => {
});
it('provides an ECS-compatible response', () => {
- const logMeta = getEcsOpsMetricsLog(createBaseOpsMetrics());
- expect(logMeta).toMatchInlineSnapshot(`
+ const logMeta = getEcsOpsMetricsLog(createMockOpsMetrics(testMetrics));
+ expect(logMeta.meta).toMatchInlineSnapshot(`
Object {
- "message": "memory: 1.0B load: [1.00,1.00,1.00] delay: 1.000",
- "meta": Object {
- "event": Object {
- "category": Array [
- "process",
- "host",
- ],
- "kind": "metric",
- "type": Array [
- "info",
- ],
- },
- "host": Object {
- "os": Object {
- "load": Object {
- "15m": 1,
- "1m": 1,
- "5m": 1,
- },
+ "event": Object {
+ "category": Array [
+ "process",
+ "host",
+ ],
+ "kind": "metric",
+ "type": Array [
+ "info",
+ ],
+ },
+ "host": Object {
+ "os": Object {
+ "load": Object {
+ "15m": 30,
+ "1m": 10,
+ "5m": 20,
},
},
- "process": Object {
- "eventLoopDelay": 1,
- "memory": Object {
- "heap": Object {
- "usedInBytes": 1,
- },
+ },
+ "process": Object {
+ "eventLoopDelay": 50,
+ "eventLoopDelayHistogram": Object {
+ "50": 50,
+ "95": 95,
+ "99": 99,
+ },
+ "memory": Object {
+ "heap": Object {
+ "usedInBytes": 100,
},
- "uptime": 0,
},
+ "uptime": 1,
},
}
`);
diff --git a/src/core/server/metrics/logging/get_ops_metrics_log.ts b/src/core/server/metrics/logging/get_ops_metrics_log.ts
index 7e13f35889ec7..6211407ae86f0 100644
--- a/src/core/server/metrics/logging/get_ops_metrics_log.ts
+++ b/src/core/server/metrics/logging/get_ops_metrics_log.ts
@@ -30,10 +30,29 @@ export function getEcsOpsMetricsLog(metrics: OpsMetrics) {
// HH:mm:ss message format for backward compatibility
const uptimeValMsg = uptimeVal ? `uptime: ${numeral(uptimeVal).format('00:00:00')} ` : '';
- // Event loop delay is in ms
+ // Event loop delay metrics are in ms
const eventLoopDelayVal = process?.event_loop_delay;
const eventLoopDelayValMsg = eventLoopDelayVal
- ? `delay: ${numeral(process?.event_loop_delay).format('0.000')}`
+ ? `mean delay: ${numeral(process?.event_loop_delay).format('0.000')}`
+ : '';
+
+ const eventLoopDelayPercentiles = process?.event_loop_delay_histogram?.percentiles;
+
+ // Extract 50th, 95th and 99th percentiles for log meta
+ const eventLoopDelayHistVals = eventLoopDelayPercentiles
+ ? {
+ 50: eventLoopDelayPercentiles[50],
+ 95: eventLoopDelayPercentiles[95],
+ 99: eventLoopDelayPercentiles[99],
+ }
+ : undefined;
+ // Format message from 50th, 95th and 99th percentiles
+ const eventLoopDelayHistMsg = eventLoopDelayPercentiles
+ ? ` delay histogram: { 50: ${numeral(eventLoopDelayPercentiles['50']).format(
+ '0.000'
+ )}; 95: ${numeral(eventLoopDelayPercentiles['95']).format('0.000')}; 99: ${numeral(
+ eventLoopDelayPercentiles['99']
+ ).format('0.000')} }`
: '';
const loadEntries = {
@@ -65,6 +84,7 @@ export function getEcsOpsMetricsLog(metrics: OpsMetrics) {
},
},
eventLoopDelay: eventLoopDelayVal,
+ eventLoopDelayHistogram: eventLoopDelayHistVals,
},
host: {
os: {
@@ -75,7 +95,13 @@ export function getEcsOpsMetricsLog(metrics: OpsMetrics) {
};
return {
- message: `${processMemoryUsedInBytesMsg}${uptimeValMsg}${loadValsMsg}${eventLoopDelayValMsg}`,
+ message: [
+ processMemoryUsedInBytesMsg,
+ uptimeValMsg,
+ loadValsMsg,
+ eventLoopDelayValMsg,
+ eventLoopDelayHistMsg,
+ ].join(''),
meta,
};
}
diff --git a/src/core/server/metrics/metrics_service.test.ts b/src/core/server/metrics/metrics_service.test.ts
index d7de41fd7ccf7..27043b8fa2c8a 100644
--- a/src/core/server/metrics/metrics_service.test.ts
+++ b/src/core/server/metrics/metrics_service.test.ts
@@ -203,6 +203,7 @@ describe('MetricsService', () => {
},
"process": Object {
"eventLoopDelay": undefined,
+ "eventLoopDelayHistogram": undefined,
"memory": Object {
"heap": Object {
"usedInBytes": undefined,
diff --git a/src/core/server/plugins/discovery/plugins_discovery.test.ts b/src/core/server/plugins/discovery/plugins_discovery.test.ts
index 958e051d0476d..a6ffdff4422be 100644
--- a/src/core/server/plugins/discovery/plugins_discovery.test.ts
+++ b/src/core/server/plugins/discovery/plugins_discovery.test.ts
@@ -7,7 +7,7 @@
*/
// must be before mocks imports to avoid conflicting with `REPO_ROOT` accessor.
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { mockPackage, scanPluginSearchPathsMock } from './plugins_discovery.test.mocks';
import mockFs from 'mock-fs';
import { loggingSystemMock } from '../../logging/logging_system.mock';
diff --git a/src/core/server/plugins/integration_tests/plugins_service.test.ts b/src/core/server/plugins/integration_tests/plugins_service.test.ts
index 4170d9422f277..ebbb3fa473b6d 100644
--- a/src/core/server/plugins/integration_tests/plugins_service.test.ts
+++ b/src/core/server/plugins/integration_tests/plugins_service.test.ts
@@ -7,7 +7,7 @@
*/
// must be before mocks imports to avoid conflicting with `REPO_ROOT` accessor.
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { mockPackage, mockDiscover } from './plugins_service.test.mocks';
import { join } from 'path';
diff --git a/src/core/server/plugins/plugin.test.ts b/src/core/server/plugins/plugin.test.ts
index 513e893992005..92cbda2a69cfe 100644
--- a/src/core/server/plugins/plugin.test.ts
+++ b/src/core/server/plugins/plugin.test.ts
@@ -8,7 +8,7 @@
import { join } from 'path';
import { BehaviorSubject } from 'rxjs';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { schema } from '@kbn/config-schema';
import { Env } from '../config';
diff --git a/src/core/server/plugins/plugin_context.test.ts b/src/core/server/plugins/plugin_context.test.ts
index 867d4d978314b..7bcf392ed510b 100644
--- a/src/core/server/plugins/plugin_context.test.ts
+++ b/src/core/server/plugins/plugin_context.test.ts
@@ -8,7 +8,7 @@
import { duration } from 'moment';
import { first } from 'rxjs/operators';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { fromRoot } from '@kbn/utils';
import {
createPluginInitializerContext,
diff --git a/src/core/server/plugins/plugins_config.test.ts b/src/core/server/plugins/plugins_config.test.ts
index d65b057fb65c0..b9225054e63ef 100644
--- a/src/core/server/plugins/plugins_config.test.ts
+++ b/src/core/server/plugins/plugins_config.test.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { getEnvOptions } from '../config/mocks';
import { PluginsConfig, PluginsConfigType } from './plugins_config';
import { Env } from '../config';
diff --git a/src/core/server/plugins/plugins_service.test.ts b/src/core/server/plugins/plugins_service.test.ts
index 0c077d732c67b..5a05817d2111f 100644
--- a/src/core/server/plugins/plugins_service.test.ts
+++ b/src/core/server/plugins/plugins_service.test.ts
@@ -11,7 +11,8 @@ import { mockDiscover, mockPackage } from './plugins_service.test.mocks';
import { resolve, join } from 'path';
import { BehaviorSubject, from } from 'rxjs';
import { schema } from '@kbn/config-schema';
-import { createAbsolutePathSerializer, REPO_ROOT } from '@kbn/dev-utils';
+import { createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { ConfigPath, ConfigService, Env } from '../config';
import { rawConfigServiceMock, getEnvOptions } from '../config/mocks';
diff --git a/src/core/server/plugins/plugins_system.test.ts b/src/core/server/plugins/plugins_system.test.ts
index 4cd8e4c551bea..3d8a47005b362 100644
--- a/src/core/server/plugins/plugins_system.test.ts
+++ b/src/core/server/plugins/plugins_system.test.ts
@@ -14,7 +14,7 @@ import {
import { BehaviorSubject } from 'rxjs';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Env } from '../config';
import { configServiceMock, getEnvOptions } from '../config/mocks';
import { CoreContext } from '../core_context';
diff --git a/src/core/server/preboot/preboot_service.test.ts b/src/core/server/preboot/preboot_service.test.ts
index dd4b1cb7d1df0..77242f0c5765f 100644
--- a/src/core/server/preboot/preboot_service.test.ts
+++ b/src/core/server/preboot/preboot_service.test.ts
@@ -7,7 +7,7 @@
*/
import { nextTick } from '@kbn/test/jest';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { LoggerFactory } from '@kbn/logging';
import { Env } from '@kbn/config';
import { getEnvOptions } from '../config/mocks';
diff --git a/src/core/server/root/index.test.ts b/src/core/server/root/index.test.ts
index 7eba051a128f0..6ea3e05b9c2c2 100644
--- a/src/core/server/root/index.test.ts
+++ b/src/core/server/root/index.test.ts
@@ -10,7 +10,7 @@ import { rawConfigService, configService, logger, mockServer } from './index.tes
import { BehaviorSubject } from 'rxjs';
import { filter, first } from 'rxjs/operators';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { getEnvOptions } from '../config/mocks';
import { Root } from '.';
import { Env } from '../config';
diff --git a/src/core/server/saved_objects/migrations/integration_tests/7.7.2_xpack_100k.test.ts b/src/core/server/saved_objects/migrations/integration_tests/7.7.2_xpack_100k.test.ts
index c22c6154c2605..139cd298d28ed 100644
--- a/src/core/server/saved_objects/migrations/integration_tests/7.7.2_xpack_100k.test.ts
+++ b/src/core/server/saved_objects/migrations/integration_tests/7.7.2_xpack_100k.test.ts
@@ -8,7 +8,7 @@
import path from 'path';
import { unlink } from 'fs/promises';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Env } from '@kbn/config';
import { getEnvOptions } from '../../../config/mocks';
import * as kbnTestServer from '../../../../test_helpers/kbn_server';
diff --git a/src/core/server/saved_objects/migrations/integration_tests/migration_from_older_v1.test.ts b/src/core/server/saved_objects/migrations/integration_tests/migration_from_older_v1.test.ts
index 0ed9262017263..c341463b78910 100644
--- a/src/core/server/saved_objects/migrations/integration_tests/migration_from_older_v1.test.ts
+++ b/src/core/server/saved_objects/migrations/integration_tests/migration_from_older_v1.test.ts
@@ -10,7 +10,7 @@ import Path from 'path';
import Fs from 'fs';
import Util from 'util';
import Semver from 'semver';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Env } from '@kbn/config';
import { getEnvOptions } from '../../../config/mocks';
import * as kbnTestServer from '../../../../test_helpers/kbn_server';
diff --git a/src/core/server/saved_objects/migrations/integration_tests/migration_from_same_v1.test.ts b/src/core/server/saved_objects/migrations/integration_tests/migration_from_same_v1.test.ts
index 15d985daccba6..34d1317755c14 100644
--- a/src/core/server/saved_objects/migrations/integration_tests/migration_from_same_v1.test.ts
+++ b/src/core/server/saved_objects/migrations/integration_tests/migration_from_same_v1.test.ts
@@ -10,7 +10,7 @@ import Path from 'path';
import Fs from 'fs';
import Util from 'util';
import Semver from 'semver';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Env } from '@kbn/config';
import { getEnvOptions } from '../../../config/mocks';
import * as kbnTestServer from '../../../../test_helpers/kbn_server';
diff --git a/src/core/server/saved_objects/saved_objects_service.test.ts b/src/core/server/saved_objects/saved_objects_service.test.ts
index a4f6c019c9624..a8bda95af46f9 100644
--- a/src/core/server/saved_objects/saved_objects_service.test.ts
+++ b/src/core/server/saved_objects/saved_objects_service.test.ts
@@ -19,7 +19,7 @@ import {
import { BehaviorSubject } from 'rxjs';
import { RawPackageInfo } from '@kbn/config';
import { ByteSizeValue } from '@kbn/config-schema';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { SavedObjectsService } from './saved_objects_service';
import { mockCoreContext } from '../core_context.mock';
diff --git a/src/core/server/saved_objects/service/lib/repository.test.ts b/src/core/server/saved_objects/service/lib/repository.test.ts
index ab692b146e7f6..1668df7a82253 100644
--- a/src/core/server/saved_objects/service/lib/repository.test.ts
+++ b/src/core/server/saved_objects/service/lib/repository.test.ts
@@ -3558,6 +3558,20 @@ describe('SavedObjectsRepository', () => {
});
});
+ it('search for the right fields when typeToNamespacesMap is set', async () => {
+ const relevantOpts = {
+ ...commonOptions,
+ fields: ['title'],
+ type: '',
+ namespaces: [],
+ typeToNamespacesMap: new Map([[type, [namespace]]]),
+ };
+
+ await findSuccess(relevantOpts, namespace);
+ const esOptions = client.search.mock.calls[0][0];
+ expect(esOptions?._source ?? []).toContain('index-pattern.title');
+ });
+
it(`accepts hasReferenceOperator`, async () => {
const relevantOpts: SavedObjectsFindOptions = {
...commonOptions,
diff --git a/src/core/server/saved_objects/service/lib/repository.ts b/src/core/server/saved_objects/service/lib/repository.ts
index 0d17525016043..53bc6f158bf93 100644
--- a/src/core/server/saved_objects/service/lib/repository.ts
+++ b/src/core/server/saved_objects/service/lib/repository.ts
@@ -930,7 +930,7 @@ export class SavedObjectsRepository {
index: pit ? undefined : this.getIndicesForTypes(allowedTypes),
// If `searchAfter` is provided, we drop `from` as it will not be used for pagination.
from: searchAfter ? undefined : perPage * (page - 1),
- _source: includedFields(type, fields),
+ _source: includedFields(allowedTypes, fields),
preference,
rest_total_hits_as_int: true,
size: perPage,
@@ -938,7 +938,7 @@ export class SavedObjectsRepository {
size: perPage,
seq_no_primary_term: true,
from: perPage * (page - 1),
- _source: includedFields(type, fields),
+ _source: includedFields(allowedTypes, fields),
...(aggsObject ? { aggs: aggsObject } : {}),
...getSearchDsl(this._mappings, this._registry, {
search,
diff --git a/src/core/server/server.test.ts b/src/core/server/server.test.ts
index 112693aae0279..48547883d5f67 100644
--- a/src/core/server/server.test.ts
+++ b/src/core/server/server.test.ts
@@ -26,7 +26,7 @@ import {
} from './server.test.mocks';
import { BehaviorSubject } from 'rxjs';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { rawConfigServiceMock, getEnvOptions } from './config/mocks';
import { Env } from './config';
import { Server } from './server';
diff --git a/src/core/server/ui_settings/integration_tests/index.test.ts b/src/core/server/ui_settings/integration_tests/index.test.ts
index ef635e90dac70..3f85beb2acec6 100644
--- a/src/core/server/ui_settings/integration_tests/index.test.ts
+++ b/src/core/server/ui_settings/integration_tests/index.test.ts
@@ -7,7 +7,7 @@
*/
import { Env } from '@kbn/config';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { getEnvOptions } from '../../config/mocks';
import { startServers, stopServers } from './lib';
import { docExistsSuite } from './doc_exists';
diff --git a/src/core/test_helpers/kbn_server.ts b/src/core/test_helpers/kbn_server.ts
index 58720be637e2f..c326c7a35df63 100644
--- a/src/core/test_helpers/kbn_server.ts
+++ b/src/core/test_helpers/kbn_server.ts
@@ -6,7 +6,8 @@
* Side Public License, v 1.
*/
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import {
createTestEsCluster,
CreateTestEsClusterOptions,
diff --git a/src/core/types/elasticsearch/search.ts b/src/core/types/elasticsearch/search.ts
index c28bf3c258f77..ac93a45da3258 100644
--- a/src/core/types/elasticsearch/search.ts
+++ b/src/core/types/elasticsearch/search.ts
@@ -9,6 +9,11 @@
import { ValuesType, UnionToIntersection } from 'utility-types';
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
+interface AggregationsAggregationContainer extends Record {
+ aggs?: any;
+ aggregations?: any;
+}
+
type InvalidAggregationRequest = unknown;
// ensures aggregations work with requests where aggregation options are a union type,
@@ -31,7 +36,7 @@ type KeysOfSources = T extends [any]
? KeyOfSource & KeyOfSource & KeyOfSource & KeyOfSource
: Record;
-type CompositeKeysOf =
+type CompositeKeysOf =
TAggregationContainer extends {
composite: { sources: [...infer TSource] };
}
@@ -40,7 +45,7 @@ type CompositeKeysOf =
+type TopMetricKeysOf =
TAggregationContainer extends { top_metrics: { metrics: { field: infer TField } } }
? TField
: TAggregationContainer extends { top_metrics: { metrics: Array<{ field: infer TField }> } }
@@ -92,17 +97,9 @@ type HitsOf<
>
>;
-type AggregationTypeName = Exclude<
- keyof estypes.AggregationsAggregationContainer,
- 'aggs' | 'aggregations'
->;
+type AggregationMap = Partial>;
-type AggregationMap = Partial>;
-
-type TopLevelAggregationRequest = Pick<
- estypes.AggregationsAggregationContainer,
- 'aggs' | 'aggregations'
->;
+type TopLevelAggregationRequest = Pick;
type MaybeKeyed<
TAggregationContainer,
@@ -113,448 +110,460 @@ type MaybeKeyed<
: { buckets: TBucket[] };
export type AggregateOf<
- TAggregationContainer extends estypes.AggregationsAggregationContainer,
+ TAggregationContainer extends AggregationsAggregationContainer,
TDocument
-> = (Record & {
- adjacency_matrix: {
- buckets: Array<
- {
- key: string;
- doc_count: number;
- } & SubAggregateOf
- >;
- };
- auto_date_histogram: {
- interval: string;
- buckets: Array<
- {
- key: number;
- key_as_string: string;
- doc_count: number;
- } & SubAggregateOf
- >;
- };
- avg: {
- value: number | null;
- value_as_string?: string;
- };
- avg_bucket: {
- value: number | null;
- };
- boxplot: {
- min: number | null;
- max: number | null;
- q1: number | null;
- q2: number | null;
- q3: number | null;
- };
- bucket_script: {
- value: unknown;
- };
- cardinality: {
- value: number;
- };
- children: {
- doc_count: number;
- } & SubAggregateOf;
- composite: {
- after_key: CompositeKeysOf;
- buckets: Array<
- {
+> = ValuesType<
+ Pick<
+ Record & {
+ adjacency_matrix: {
+ buckets: Array<
+ {
+ key: string;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ };
+ auto_date_histogram: {
+ interval: string;
+ buckets: Array<
+ {
+ key: number;
+ key_as_string: string;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ };
+ avg: {
+ value: number | null;
+ value_as_string?: string;
+ };
+ avg_bucket: {
+ value: number | null;
+ };
+ boxplot: {
+ min: number | null;
+ max: number | null;
+ q1: number | null;
+ q2: number | null;
+ q3: number | null;
+ };
+ bucket_script: {
+ value: unknown;
+ };
+ cardinality: {
+ value: number;
+ };
+ children: {
doc_count: number;
- key: CompositeKeysOf;
- } & SubAggregateOf
- >;
- };
- cumulative_cardinality: {
- value: number;
- };
- cumulative_sum: {
- value: number;
- };
- date_histogram: MaybeKeyed<
- TAggregationContainer,
- {
- key: number;
- key_as_string: string;
- doc_count: number;
- } & SubAggregateOf
- >;
- date_range: MaybeKeyed<
- TAggregationContainer,
- Partial<{ from: string | number; from_as_string: string }> &
- Partial<{ to: string | number; to_as_string: string }> & {
+ } & SubAggregateOf;
+ composite: {
+ after_key: CompositeKeysOf;
+ buckets: Array<
+ {
+ doc_count: number;
+ key: CompositeKeysOf;
+ } & SubAggregateOf
+ >;
+ };
+ cumulative_cardinality: {
+ value: number;
+ };
+ cumulative_sum: {
+ value: number;
+ };
+ date_histogram: MaybeKeyed<
+ TAggregationContainer,
+ {
+ key: number;
+ key_as_string: string;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ date_range: MaybeKeyed<
+ TAggregationContainer,
+ Partial<{ from: string | number; from_as_string: string }> &
+ Partial<{ to: string | number; to_as_string: string }> & {
+ doc_count: number;
+ key: string;
+ }
+ >;
+ derivative:
+ | {
+ value: number | null;
+ }
+ | undefined;
+ extended_stats: {
+ count: number;
+ min: number | null;
+ max: number | null;
+ avg: number | null;
+ sum: number;
+ sum_of_squares: number | null;
+ variance: number | null;
+ variance_population: number | null;
+ variance_sampling: number | null;
+ std_deviation: number | null;
+ std_deviation_population: number | null;
+ std_deviation_sampling: number | null;
+ std_deviation_bounds: {
+ upper: number | null;
+ lower: number | null;
+ upper_population: number | null;
+ lower_population: number | null;
+ upper_sampling: number | null;
+ lower_sampling: number | null;
+ };
+ } & (
+ | {
+ min_as_string: string;
+ max_as_string: string;
+ avg_as_string: string;
+ sum_of_squares_as_string: string;
+ variance_population_as_string: string;
+ variance_sampling_as_string: string;
+ std_deviation_as_string: string;
+ std_deviation_population_as_string: string;
+ std_deviation_sampling_as_string: string;
+ std_deviation_bounds_as_string: {
+ upper: string;
+ lower: string;
+ upper_population: string;
+ lower_population: string;
+ upper_sampling: string;
+ lower_sampling: string;
+ };
+ }
+ | {}
+ );
+ extended_stats_bucket: {
+ count: number;
+ min: number | null;
+ max: number | null;
+ avg: number | null;
+ sum: number | null;
+ sum_of_squares: number | null;
+ variance: number | null;
+ variance_population: number | null;
+ variance_sampling: number | null;
+ std_deviation: number | null;
+ std_deviation_population: number | null;
+ std_deviation_sampling: number | null;
+ std_deviation_bounds: {
+ upper: number | null;
+ lower: number | null;
+ upper_population: number | null;
+ lower_population: number | null;
+ upper_sampling: number | null;
+ lower_sampling: number | null;
+ };
+ };
+ filter: {
doc_count: number;
- key: string;
- }
- >;
- derivative:
- | {
- value: number | null;
- }
- | undefined;
- extended_stats: {
- count: number;
- min: number | null;
- max: number | null;
- avg: number | null;
- sum: number;
- sum_of_squares: number | null;
- variance: number | null;
- variance_population: number | null;
- variance_sampling: number | null;
- std_deviation: number | null;
- std_deviation_population: number | null;
- std_deviation_sampling: number | null;
- std_deviation_bounds: {
- upper: number | null;
- lower: number | null;
- upper_population: number | null;
- lower_population: number | null;
- upper_sampling: number | null;
- lower_sampling: number | null;
- };
- } & (
- | {
- min_as_string: string;
- max_as_string: string;
- avg_as_string: string;
- sum_of_squares_as_string: string;
- variance_population_as_string: string;
- variance_sampling_as_string: string;
- std_deviation_as_string: string;
- std_deviation_population_as_string: string;
- std_deviation_sampling_as_string: string;
- std_deviation_bounds_as_string: {
- upper: string;
- lower: string;
- upper_population: string;
- lower_population: string;
- upper_sampling: string;
- lower_sampling: string;
+ } & SubAggregateOf;
+ filters: {
+ buckets: TAggregationContainer extends { filters: { filters: any[] } }
+ ? Array<
+ {
+ doc_count: number;
+ } & SubAggregateOf
+ >
+ : TAggregationContainer extends { filters: { filters: Record } }
+ ? {
+ [key in keyof TAggregationContainer['filters']['filters']]: {
+ doc_count: number;
+ } & SubAggregateOf;
+ } & (TAggregationContainer extends {
+ filters: { other_bucket_key: infer TOtherBucketKey };
+ }
+ ? Record<
+ TOtherBucketKey & string,
+ { doc_count: number } & SubAggregateOf
+ >
+ : unknown) &
+ (TAggregationContainer extends { filters: { other_bucket: true } }
+ ? {
+ _other: { doc_count: number } & SubAggregateOf<
+ TAggregationContainer,
+ TDocument
+ >;
+ }
+ : unknown)
+ : unknown;
+ };
+ geo_bounds: {
+ top_left: {
+ lat: number | null;
+ lon: number | null;
};
- }
- | {}
- );
- extended_stats_bucket: {
- count: number;
- min: number | null;
- max: number | null;
- avg: number | null;
- sum: number | null;
- sum_of_squares: number | null;
- variance: number | null;
- variance_population: number | null;
- variance_sampling: number | null;
- std_deviation: number | null;
- std_deviation_population: number | null;
- std_deviation_sampling: number | null;
- std_deviation_bounds: {
- upper: number | null;
- lower: number | null;
- upper_population: number | null;
- lower_population: number | null;
- upper_sampling: number | null;
- lower_sampling: number | null;
- };
- };
- filter: {
- doc_count: number;
- } & SubAggregateOf;
- filters: {
- buckets: TAggregationContainer extends { filters: { filters: any[] } }
- ? Array<
+ bottom_right: {
+ lat: number | null;
+ lon: number | null;
+ };
+ };
+ geo_centroid: {
+ count: number;
+ location: {
+ lat: number;
+ lon: number;
+ };
+ };
+ geo_distance: MaybeKeyed<
+ TAggregationContainer,
+ {
+ from: number;
+ to?: number;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ geo_hash: {
+ buckets: Array<
{
doc_count: number;
+ key: string;
} & SubAggregateOf
- >
- : TAggregationContainer extends { filters: { filters: Record } }
- ? {
- [key in keyof TAggregationContainer['filters']['filters']]: {
+ >;
+ };
+ geotile_grid: {
+ buckets: Array<
+ {
doc_count: number;
- } & SubAggregateOf;
- } & (TAggregationContainer extends { filters: { other_bucket_key: infer TOtherBucketKey } }
- ? Record<
- TOtherBucketKey & string,
- { doc_count: number } & SubAggregateOf
- >
- : unknown) &
- (TAggregationContainer extends { filters: { other_bucket: true } }
- ? { _other: { doc_count: number } & SubAggregateOf }
- : unknown)
- : unknown;
- };
- geo_bounds: {
- top_left: {
- lat: number | null;
- lon: number | null;
- };
- bottom_right: {
- lat: number | null;
- lon: number | null;
- };
- };
- geo_centroid: {
- count: number;
- location: {
- lat: number;
- lon: number;
- };
- };
- geo_distance: MaybeKeyed<
- TAggregationContainer,
- {
- from: number;
- to?: number;
- doc_count: number;
- } & SubAggregateOf
- >;
- geo_hash: {
- buckets: Array<
- {
+ key: string;
+ } & SubAggregateOf
+ >;
+ };
+ global: {
doc_count: number;
- key: string;
- } & SubAggregateOf
- >;
- };
- geotile_grid: {
- buckets: Array<
- {
+ } & SubAggregateOf;
+ histogram: MaybeKeyed<
+ TAggregationContainer,
+ {
+ key: number;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ ip_range: MaybeKeyed<
+ TAggregationContainer,
+ {
+ key: string;
+ from?: string;
+ to?: string;
+ doc_count: number;
+ },
+ TAggregationContainer extends { ip_range: { ranges: Array } }
+ ? TRangeType extends { key: infer TKeys }
+ ? TKeys
+ : string
+ : string
+ >;
+ inference: {
+ value: number;
+ prediction_probability: number;
+ prediction_score: number;
+ };
+ max: {
+ value: number | null;
+ value_as_string?: string;
+ };
+ max_bucket: {
+ value: number | null;
+ };
+ min: {
+ value: number | null;
+ value_as_string?: string;
+ };
+ min_bucket: {
+ value: number | null;
+ };
+ median_absolute_deviation: {
+ value: number | null;
+ };
+ moving_avg:
+ | {
+ value: number | null;
+ }
+ | undefined;
+ moving_fn: {
+ value: number | null;
+ };
+ moving_percentiles: TAggregationContainer extends Record
+ ? Array<{ key: number; value: number | null }>
+ : Record | undefined;
+ missing: {
doc_count: number;
- key: string;
- } & SubAggregateOf
- >;
- };
- global: {
- doc_count: number;
- } & SubAggregateOf;
- histogram: MaybeKeyed<
- TAggregationContainer,
- {
- key: number;
- doc_count: number;
- } & SubAggregateOf
- >;
- ip_range: MaybeKeyed<
- TAggregationContainer,
- {
- key: string;
- from?: string;
- to?: string;
- doc_count: number;
- },
- TAggregationContainer extends { ip_range: { ranges: Array } }
- ? TRangeType extends { key: infer TKeys }
- ? TKeys
- : string
- : string
- >;
- inference: {
- value: number;
- prediction_probability: number;
- prediction_score: number;
- };
- max: {
- value: number | null;
- value_as_string?: string;
- };
- max_bucket: {
- value: number | null;
- };
- min: {
- value: number | null;
- value_as_string?: string;
- };
- min_bucket: {
- value: number | null;
- };
- median_absolute_deviation: {
- value: number | null;
- };
- moving_avg:
- | {
+ } & SubAggregateOf;
+ multi_terms: {
+ doc_count_error_upper_bound: number;
+ sum_other_doc_count: number;
+ buckets: Array<
+ {
+ doc_count: number;
+ key: string[];
+ } & SubAggregateOf
+ >;
+ };
+ nested: {
+ doc_count: number;
+ } & SubAggregateOf;
+ normalize: {
value: number | null;
- }
- | undefined;
- moving_fn: {
- value: number | null;
- };
- moving_percentiles: TAggregationContainer extends Record
- ? Array<{ key: number; value: number | null }>
- : Record | undefined;
- missing: {
- doc_count: number;
- } & SubAggregateOf;
- multi_terms: {
- doc_count_error_upper_bound: number;
- sum_other_doc_count: number;
- buckets: Array<
- {
+ // TODO: should be perhaps based on input? ie when `format` is specified
+ value_as_string?: string;
+ };
+ parent: {
doc_count: number;
- key: string[];
- } & SubAggregateOf
- >;
- };
- nested: {
- doc_count: number;
- } & SubAggregateOf;
- normalize: {
- value: number | null;
- // TODO: should be perhaps based on input? ie when `format` is specified
- value_as_string?: string;
- };
- parent: {
- doc_count: number;
- } & SubAggregateOf;
- percentiles: {
- values: TAggregationContainer extends Record
- ? Array<{ key: number; value: number | null }>
- : Record;
- };
- percentile_ranks: {
- values: TAggregationContainer extends Record
- ? Array<{ key: number; value: number | null }>
- : Record;
- };
- percentiles_bucket: {
- values: TAggregationContainer extends Record
- ? Array<{ key: number; value: number | null }>
- : Record;
- };
- range: MaybeKeyed<
- TAggregationContainer,
- {
- key: string;
- from?: number;
- from_as_string?: string;
- to?: number;
- to_as_string?: string;
- doc_count: number;
- },
- TAggregationContainer extends { range: { ranges: Array } }
- ? TRangeType extends { key: infer TKeys }
- ? TKeys
- : string
- : string
- >;
- rare_terms: Array<
- {
- key: string | number;
- doc_count: number;
- } & SubAggregateOf
- >;
- rate: {
- value: number | null;
- };
- reverse_nested: {
- doc_count: number;
- } & SubAggregateOf;
- sampler: {
- doc_count: number;
- } & SubAggregateOf;
- scripted_metric: {
- value: unknown;
- };
- serial_diff: {
- value: number | null;
- // TODO: should be perhaps based on input? ie when `format` is specified
- value_as_string?: string;
- };
- significant_terms: {
- doc_count: number;
- bg_count: number;
- buckets: Array<
- {
- key: string | number;
- score: number;
+ } & SubAggregateOf;
+ percentiles: {
+ values: TAggregationContainer extends Record
+ ? Array<{ key: number; value: number | null }>
+ : Record;
+ };
+ percentile_ranks: {
+ values: TAggregationContainer extends Record
+ ? Array<{ key: number; value: number | null }>
+ : Record;
+ };
+ percentiles_bucket: {
+ values: TAggregationContainer extends Record
+ ? Array<{ key: number; value: number | null }>
+ : Record;
+ };
+ range: MaybeKeyed<
+ TAggregationContainer,
+ {
+ key: string;
+ from?: number;
+ from_as_string?: string;
+ to?: number;
+ to_as_string?: string;
+ doc_count: number;
+ },
+ TAggregationContainer extends { range: { ranges: Array } }
+ ? TRangeType extends { key: infer TKeys }
+ ? TKeys
+ : string
+ : string
+ >;
+ rare_terms: Array<
+ {
+ key: string | number;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ rate: {
+ value: number | null;
+ };
+ reverse_nested: {
+ doc_count: number;
+ } & SubAggregateOf;
+ sampler: {
+ doc_count: number;
+ } & SubAggregateOf;
+ scripted_metric: {
+ value: unknown;
+ };
+ serial_diff: {
+ value: number | null;
+ // TODO: should be perhaps based on input? ie when `format` is specified
+ value_as_string?: string;
+ };
+ significant_terms: {
doc_count: number;
bg_count: number;
- } & SubAggregateOf
- >;
- };
- significant_text: {
- doc_count: number;
- buckets: Array<{
- key: string;
- doc_count: number;
- score: number;
- bg_count: number;
- }>;
- };
- stats: {
- count: number;
- min: number | null;
- max: number | null;
- avg: number | null;
- sum: number;
- } & (
- | {
- min_as_string: string;
- max_as_string: string;
- avg_as_string: string;
- sum_as_string: string;
- }
- | {}
- );
- stats_bucket: {
- count: number;
- min: number | null;
- max: number | null;
- avg: number | null;
- sum: number;
- };
- string_stats: {
- count: number;
- min_length: number | null;
- max_length: number | null;
- avg_length: number | null;
- entropy: number | null;
- distribution: Record;
- };
- sum: {
- value: number | null;
- value_as_string?: string;
- };
- sum_bucket: {
- value: number | null;
- };
- terms: {
- doc_count_error_upper_bound: number;
- sum_other_doc_count: number;
- buckets: Array<
- {
+ buckets: Array<
+ {
+ key: string | number;
+ score: number;
+ doc_count: number;
+ bg_count: number;
+ } & SubAggregateOf
+ >;
+ };
+ significant_text: {
doc_count: number;
- key: string | number;
- } & SubAggregateOf
- >;
- };
- top_hits: {
- hits: {
- total: {
+ buckets: Array<{
+ key: string;
+ doc_count: number;
+ score: number;
+ bg_count: number;
+ }>;
+ };
+ stats: {
+ count: number;
+ min: number | null;
+ max: number | null;
+ avg: number | null;
+ sum: number;
+ } & (
+ | {
+ min_as_string: string;
+ max_as_string: string;
+ avg_as_string: string;
+ sum_as_string: string;
+ }
+ | {}
+ );
+ stats_bucket: {
+ count: number;
+ min: number | null;
+ max: number | null;
+ avg: number | null;
+ sum: number;
+ };
+ string_stats: {
+ count: number;
+ min_length: number | null;
+ max_length: number | null;
+ avg_length: number | null;
+ entropy: number | null;
+ distribution: Record;
+ };
+ sum: {
+ value: number | null;
+ value_as_string?: string;
+ };
+ sum_bucket: {
+ value: number | null;
+ };
+ terms: {
+ doc_count_error_upper_bound: number;
+ sum_other_doc_count: number;
+ buckets: Array<
+ {
+ doc_count: number;
+ key: string | number;
+ } & SubAggregateOf
+ >;
+ };
+ top_hits: {
+ hits: {
+ total: {
+ value: number;
+ relation: 'eq' | 'gte';
+ };
+ max_score: number | null;
+ hits: TAggregationContainer extends { top_hits: estypes.AggregationsTopHitsAggregation }
+ ? HitsOf
+ : estypes.SearchHitsMetadata;
+ };
+ };
+ top_metrics: {
+ top: Array<{
+ sort: number[] | string[];
+ metrics: Record, string | number | null>;
+ }>;
+ };
+ weighted_avg: { value: number | null };
+ value_count: {
value: number;
- relation: 'eq' | 'gte';
};
- max_score: number | null;
- hits: TAggregationContainer extends { top_hits: estypes.AggregationsTopHitsAggregation }
- ? HitsOf
- : estypes.SearchHitsMetadata;
- };
- };
- top_metrics: {
- top: Array<{
- sort: number[] | string[];
- metrics: Record, string | number | null>;
- }>;
- };
- weighted_avg: { value: number | null };
- value_count: {
- value: number;
- };
- // t_test: {} not defined
-})[ValidAggregationKeysOf & AggregationTypeName];
+ // t_test: {} not defined
+ },
+ Exclude, 'aggs' | 'aggregations'> & string
+ >
+>;
type AggregateOfMap = {
- [TAggregationName in keyof TAggregationMap]: Required[TAggregationName] extends estypes.AggregationsAggregationContainer
+ [TAggregationName in keyof TAggregationMap]: Required[TAggregationName] extends AggregationsAggregationContainer
? AggregateOf
: never; // using never means we effectively ignore optional keys, using {} creates a union type of { ... } | {}
};
diff --git a/src/dev/build/lib/integration_tests/version_info.test.ts b/src/dev/build/lib/integration_tests/version_info.test.ts
index e7a3a04c04734..9385de6e00a4f 100644
--- a/src/dev/build/lib/integration_tests/version_info.test.ts
+++ b/src/dev/build/lib/integration_tests/version_info.test.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { kibanaPackageJson as pkg } from '@kbn/dev-utils';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
import { getVersionInfo } from '../version_info';
diff --git a/src/dev/build/tasks/install_chromium.js b/src/dev/build/tasks/install_chromium.js
index ad60019ea81a4..2bcceb33fad00 100644
--- a/src/dev/build/tasks/install_chromium.js
+++ b/src/dev/build/tasks/install_chromium.js
@@ -6,10 +6,8 @@
* Side Public License, v 1.
*/
-import { first } from 'rxjs/operators';
-
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { installBrowser } from '../../../../x-pack/plugins/reporting/server/browsers/install';
+import { install } from '../../../../x-pack/plugins/screenshotting/server/utils';
export const InstallChromium = {
description: 'Installing Chromium',
@@ -22,13 +20,23 @@ export const InstallChromium = {
// revert after https://github.com/elastic/kibana/issues/109949
if (target === 'darwin-arm64') continue;
- const { binaryPath$ } = installBrowser(
- log,
- build.resolvePathForPlatform(platform, 'x-pack/plugins/reporting/chromium'),
+ const logger = {
+ get: log.withType.bind(log),
+ debug: log.debug.bind(log),
+ info: log.info.bind(log),
+ warn: log.warning.bind(log),
+ trace: log.verbose.bind(log),
+ error: log.error.bind(log),
+ fatal: log.error.bind(log),
+ log: log.write.bind(log),
+ };
+
+ await install(
+ logger,
+ build.resolvePathForPlatform(platform, 'x-pack/plugins/screenshotting/chromium'),
platform.getName(),
platform.getArchitecture()
);
- await binaryPath$.pipe(first()).toPromise();
}
},
};
diff --git a/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.ts b/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.ts
index 02b469820f900..cc1ffb5f3e301 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.ts
+++ b/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.ts
@@ -10,7 +10,8 @@ import { resolve } from 'path';
import { readFileSync } from 'fs';
import { copyFile } from 'fs/promises';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import Mustache from 'mustache';
import { compressTar, copyAll, mkdirp, write, Config } from '../../../lib';
diff --git a/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker b/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
index 895c42ad5f47d..a7d8fe684ef95 100755
--- a/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
+++ b/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
@@ -354,6 +354,7 @@ kibana_vars=(
xpack.security.showInsecureClusterWarning
xpack.securitySolution.alertMergeStrategy
xpack.securitySolution.alertIgnoreFields
+ xpack.securitySolution.maxExceptionsImportSize
xpack.securitySolution.maxRuleImportExportSize
xpack.securitySolution.maxRuleImportPayloadBytes
xpack.securitySolution.maxTimelineImportExportSize
diff --git a/src/dev/build/tasks/os_packages/docker_generator/run.ts b/src/dev/build/tasks/os_packages/docker_generator/run.ts
index 6a192baed3fa3..085b4393caa66 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/run.ts
+++ b/src/dev/build/tasks/os_packages/docker_generator/run.ts
@@ -10,7 +10,8 @@ import { access, link, unlink, chmod } from 'fs';
import { resolve, basename } from 'path';
import { promisify } from 'util';
-import { ToolingLog, kibanaPackageJson } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { kibanaPackageJson } from '@kbn/utils';
import { write, copyAll, mkdirp, exec, Config, Build } from '../../../lib';
import * as dockerTemplates from './templates';
diff --git a/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/Dockerfile b/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/Dockerfile
index dbdace85eda01..e9a6ef3539692 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/Dockerfile
+++ b/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/Dockerfile
@@ -2,9 +2,9 @@
# Build stage 0
# Extract Kibana and make various file manipulations.
################################################################################
-ARG BASE_REGISTRY=registry1.dsop.io
+ARG BASE_REGISTRY=registry1.dso.mil
ARG BASE_IMAGE=redhat/ubi/ubi8
-ARG BASE_TAG=8.4
+ARG BASE_TAG=8.5
FROM ${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG} as prep_files
diff --git a/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/hardening_manifest.yaml b/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/hardening_manifest.yaml
index 24614039e5eb7..1c7926c2fcbc2 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/hardening_manifest.yaml
+++ b/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/hardening_manifest.yaml
@@ -14,7 +14,7 @@ tags:
# Build args passed to Dockerfile ARGs
args:
BASE_IMAGE: 'redhat/ubi/ubi8'
- BASE_TAG: '8.4'
+ BASE_TAG: '8.5'
# Docker image labels
labels:
@@ -59,4 +59,4 @@ maintainers:
- email: "yalabe.dukuly@anchore.com"
name: "Yalabe Dukuly"
username: "yalabe.dukuly"
- cht_member: true
\ No newline at end of file
+ cht_member: true
diff --git a/src/dev/chromium_version.ts b/src/dev/chromium_version.ts
index 410fcc72fbc0f..1f55330a92bb6 100644
--- a/src/dev/chromium_version.ts
+++ b/src/dev/chromium_version.ts
@@ -6,7 +6,8 @@
* Side Public License, v 1.
*/
-import { run, REPO_ROOT, ToolingLog } from '@kbn/dev-utils';
+import { run, ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import chalk from 'chalk';
import cheerio from 'cheerio';
import fs from 'fs';
diff --git a/src/dev/code_coverage/ingest_coverage/__tests__/enumerate_patterns.test.js b/src/dev/code_coverage/ingest_coverage/__tests__/enumerate_patterns.test.js
index 05af7c2a154a4..40d36ed46ea34 100644
--- a/src/dev/code_coverage/ingest_coverage/__tests__/enumerate_patterns.test.js
+++ b/src/dev/code_coverage/ingest_coverage/__tests__/enumerate_patterns.test.js
@@ -7,7 +7,8 @@
*/
import { enumeratePatterns } from '../team_assignment/enumerate_patterns';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
const log = new ToolingLog({
level: 'info',
@@ -15,16 +16,17 @@ const log = new ToolingLog({
});
describe(`enumeratePatterns`, () => {
- it(`should resolve x-pack/plugins/reporting/server/browsers/extract/unzip.ts to kibana-reporting`, () => {
+ it(`should resolve x-pack/plugins/screenshotting/server/browsers/extract/unzip.ts to kibana-screenshotting`, () => {
const actual = enumeratePatterns(REPO_ROOT)(log)(
- new Map([['x-pack/plugins/reporting', ['kibana-reporting']]])
+ new Map([['x-pack/plugins/screenshotting', ['kibana-screenshotting']]])
);
- expect(
- actual[0].includes(
- 'x-pack/plugins/reporting/server/browsers/extract/unzip.ts kibana-reporting'
- )
- ).toBe(true);
+ expect(actual).toHaveProperty(
+ '0',
+ expect.arrayContaining([
+ 'x-pack/plugins/screenshotting/server/browsers/extract/unzip.ts kibana-screenshotting',
+ ])
+ );
});
it(`should resolve src/plugins/charts/common/static/color_maps/color_maps.ts to kibana-app`, () => {
const actual = enumeratePatterns(REPO_ROOT)(log)(
diff --git a/src/dev/code_coverage/ingest_coverage/team_assignment/index.js b/src/dev/code_coverage/ingest_coverage/team_assignment/index.js
index 0e341a3aac1dc..a38c4ee50b40a 100644
--- a/src/dev/code_coverage/ingest_coverage/team_assignment/index.js
+++ b/src/dev/code_coverage/ingest_coverage/team_assignment/index.js
@@ -6,7 +6,8 @@
* Side Public License, v 1.
*/
-import { run, createFlagError, REPO_ROOT } from '@kbn/dev-utils';
+import { run, createFlagError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { parse } from './parse_owners';
import { flush } from './flush';
import { enumeratePatterns } from './enumerate_patterns';
diff --git a/src/dev/ensure_all_tests_in_ci_group.ts b/src/dev/ensure_all_tests_in_ci_group.ts
index aeccefae05d2c..a2d9729d3352b 100644
--- a/src/dev/ensure_all_tests_in_ci_group.ts
+++ b/src/dev/ensure_all_tests_in_ci_group.ts
@@ -12,7 +12,8 @@ import Fs from 'fs/promises';
import execa from 'execa';
import { safeLoad } from 'js-yaml';
-import { run, REPO_ROOT } from '@kbn/dev-utils';
+import { run } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { schema } from '@kbn/config-schema';
const RELATIVE_JOBS_YAML_PATH = '.ci/ci_groups.yml';
diff --git a/src/dev/eslint/run_eslint_with_types.ts b/src/dev/eslint/run_eslint_with_types.ts
index 750011dea1031..0f2a10d07d681 100644
--- a/src/dev/eslint/run_eslint_with_types.ts
+++ b/src/dev/eslint/run_eslint_with_types.ts
@@ -14,7 +14,8 @@ import execa from 'execa';
import * as Rx from 'rxjs';
import { mergeMap, reduce } from 'rxjs/operators';
import { supportsColor } from 'chalk';
-import { REPO_ROOT, run, createFailError } from '@kbn/dev-utils';
+import { run, createFailError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { lastValueFrom } from '@kbn/std';
import { PROJECTS } from '../typescript/projects';
diff --git a/src/dev/license_checker/config.ts b/src/dev/license_checker/config.ts
index 52b1f816090df..9674694c0d655 100644
--- a/src/dev/license_checker/config.ts
+++ b/src/dev/license_checker/config.ts
@@ -76,6 +76,6 @@ export const LICENSE_OVERRIDES = {
'jsts@1.6.2': ['Eclipse Distribution License - v 1.0'], // cf. https://github.com/bjornharrtell/jsts
'@mapbox/jsonlint-lines-primitives@2.0.2': ['MIT'], // license in readme https://github.com/tmcw/jsonlint
'@elastic/ems-client@8.0.0': ['Elastic License 2.0'],
- '@elastic/eui@41.0.0': ['SSPL-1.0 OR Elastic License 2.0'],
+ '@elastic/eui@41.2.3': ['SSPL-1.0 OR Elastic License 2.0'],
'language-subtag-registry@0.3.21': ['CC-BY-4.0'], // retired ODC‑By license https://github.com/mattcg/language-subtag-registry
};
diff --git a/src/dev/plugin_discovery/find_plugins.ts b/src/dev/plugin_discovery/find_plugins.ts
index f1725f34d1f8e..53a53bc08e15b 100644
--- a/src/dev/plugin_discovery/find_plugins.ts
+++ b/src/dev/plugin_discovery/find_plugins.ts
@@ -8,11 +8,9 @@
import Path from 'path';
import { getPluginSearchPaths } from '@kbn/config';
-import {
- KibanaPlatformPlugin,
- REPO_ROOT,
- simpleKibanaPlatformPluginDiscovery,
-} from '@kbn/dev-utils';
+import { KibanaPlatformPlugin, simpleKibanaPlatformPluginDiscovery } from '@kbn/dev-utils';
+
+import { REPO_ROOT } from '@kbn/utils';
export interface SearchOptions {
oss: boolean;
diff --git a/src/dev/run_build_docs_cli.ts b/src/dev/run_build_docs_cli.ts
index aad524b4437d3..8ee75912c1a7e 100644
--- a/src/dev/run_build_docs_cli.ts
+++ b/src/dev/run_build_docs_cli.ts
@@ -9,7 +9,8 @@
import Path from 'path';
import dedent from 'dedent';
-import { run, REPO_ROOT, createFailError } from '@kbn/dev-utils';
+import { run, createFailError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
const DEFAULT_DOC_REPO_PATH = Path.resolve(REPO_ROOT, '..', 'docs');
diff --git a/src/dev/run_find_plugins_with_circular_deps.ts b/src/dev/run_find_plugins_with_circular_deps.ts
index f7974b464fcaf..f9ee7bd84c54f 100644
--- a/src/dev/run_find_plugins_with_circular_deps.ts
+++ b/src/dev/run_find_plugins_with_circular_deps.ts
@@ -10,7 +10,8 @@ import dedent from 'dedent';
import { parseDependencyTree, parseCircular, prettyCircular } from 'dpdm';
import { relative } from 'path';
import { getPluginSearchPaths } from '@kbn/config';
-import { REPO_ROOT, run } from '@kbn/dev-utils';
+import { run } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
interface Options {
debug?: boolean;
diff --git a/src/dev/run_precommit_hook.js b/src/dev/run_precommit_hook.js
index a7bd0a9f57f6e..dfa3a94426bb2 100644
--- a/src/dev/run_precommit_hook.js
+++ b/src/dev/run_precommit_hook.js
@@ -8,7 +8,8 @@
import SimpleGit from 'simple-git/promise';
-import { run, combineErrors, createFlagError, REPO_ROOT } from '@kbn/dev-utils';
+import { run, combineErrors, createFlagError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import * as Eslint from './eslint';
import * as Stylelint from './stylelint';
import { getFilesForCommit, checkFileCasing } from './precommit_hook';
diff --git a/src/dev/typescript/build_ts_refs.ts b/src/dev/typescript/build_ts_refs.ts
index aaa8c0d12fa4d..f3896cf676e27 100644
--- a/src/dev/typescript/build_ts_refs.ts
+++ b/src/dev/typescript/build_ts_refs.ts
@@ -8,7 +8,8 @@
import Path from 'path';
-import { ToolingLog, REPO_ROOT, ProcRunner } from '@kbn/dev-utils';
+import { ToolingLog, ProcRunner } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { ROOT_REFS_CONFIG_PATH } from './root_refs_config';
import { Project } from './project';
diff --git a/src/dev/typescript/build_ts_refs_cli.ts b/src/dev/typescript/build_ts_refs_cli.ts
index c68424c2a98f7..09866315fc8dd 100644
--- a/src/dev/typescript/build_ts_refs_cli.ts
+++ b/src/dev/typescript/build_ts_refs_cli.ts
@@ -8,7 +8,8 @@
import Path from 'path';
-import { run, REPO_ROOT, createFlagError } from '@kbn/dev-utils';
+import { run, createFlagError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import del from 'del';
import { RefOutputCache } from './ref_output_cache';
diff --git a/src/dev/typescript/ref_output_cache/ref_output_cache.ts b/src/dev/typescript/ref_output_cache/ref_output_cache.ts
index b7e641ceb33d5..32b08ec1ba0df 100644
--- a/src/dev/typescript/ref_output_cache/ref_output_cache.ts
+++ b/src/dev/typescript/ref_output_cache/ref_output_cache.ts
@@ -9,7 +9,8 @@
import Path from 'path';
import Fs from 'fs/promises';
-import { ToolingLog, kibanaPackageJson, extract } from '@kbn/dev-utils';
+import { ToolingLog, extract } from '@kbn/dev-utils';
+import { kibanaPackageJson } from '@kbn/utils';
import del from 'del';
import tempy from 'tempy';
diff --git a/src/dev/typescript/root_refs_config.ts b/src/dev/typescript/root_refs_config.ts
index f4aa88f1ea6b2..e20b1ab46cd82 100644
--- a/src/dev/typescript/root_refs_config.ts
+++ b/src/dev/typescript/root_refs_config.ts
@@ -10,7 +10,8 @@ import Path from 'path';
import Fs from 'fs/promises';
import dedent from 'dedent';
-import { REPO_ROOT, ToolingLog } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import normalize from 'normalize-path';
import { PROJECTS } from './projects';
diff --git a/src/plugins/chart_expressions/expression_heatmap/public/expression_renderers/index.scss b/src/plugins/chart_expressions/expression_heatmap/public/expression_renderers/index.scss
index 6e1afd91c476d..fb004dfce4ec0 100644
--- a/src/plugins/chart_expressions/expression_heatmap/public/expression_renderers/index.scss
+++ b/src/plugins/chart_expressions/expression_heatmap/public/expression_renderers/index.scss
@@ -9,14 +9,6 @@
padding: $euiSizeS;
}
-.heatmap-chart__empty {
- height: 100%;
- display: flex;
- flex-direction: column;
- align-items: center;
- justify-content: center;
-}
-
.heatmap-chart-icon__subdued {
fill: $euiTextSubduedColor;
}
diff --git a/src/plugins/charts/public/static/components/empty_placeholder.scss b/src/plugins/charts/public/static/components/empty_placeholder.scss
new file mode 100644
index 0000000000000..3f98da9eecb6a
--- /dev/null
+++ b/src/plugins/charts/public/static/components/empty_placeholder.scss
@@ -0,0 +1,7 @@
+.chart__empty-placeholder {
+ height: 100%;
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ justify-content: center;
+}
\ No newline at end of file
diff --git a/src/plugins/charts/public/static/components/empty_placeholder.tsx b/src/plugins/charts/public/static/components/empty_placeholder.tsx
index db3f3fb6739d5..e376120c9cd9e 100644
--- a/src/plugins/charts/public/static/components/empty_placeholder.tsx
+++ b/src/plugins/charts/public/static/components/empty_placeholder.tsx
@@ -9,15 +9,20 @@
import React from 'react';
import { EuiIcon, EuiText, IconType, EuiSpacer } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n-react';
+import './empty_placeholder.scss';
-export const EmptyPlaceholder = (props: { icon: IconType }) => (
+export const EmptyPlaceholder = ({
+ icon,
+ message =