From 8f8cba50135e6d692add3bc7aa160bbddfdd4c81 Mon Sep 17 00:00:00 2001
From: Vadim Dalecky
Date: Thu, 23 Jul 2020 04:36:52 -0700
Subject: [PATCH 01/59] =?UTF-8?q?fix:=20=F0=9F=90=9B=20don't=20show=20acti?=
=?UTF-8?q?on=20in=20dashboard=5Fonly=20mode=20(#73010)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
x-pack/plugins/discover_enhanced/kibana.json | 2 +-
.../abstract_explore_data_action.ts | 12 ++++++++++++
.../explore_data_chart_action.test.ts | 17 ++++++++++++++++-
.../explore_data_context_menu_action.test.ts | 14 +++++++++++++-
.../plugins/discover_enhanced/public/plugin.ts | 3 +++
5 files changed, 45 insertions(+), 3 deletions(-)
diff --git a/x-pack/plugins/discover_enhanced/kibana.json b/x-pack/plugins/discover_enhanced/kibana.json
index fbd04fe009687..531a84cd4c0e0 100644
--- a/x-pack/plugins/discover_enhanced/kibana.json
+++ b/x-pack/plugins/discover_enhanced/kibana.json
@@ -5,7 +5,7 @@
"server": true,
"ui": true,
"requiredPlugins": ["uiActions", "embeddable", "discover"],
- "optionalPlugins": ["share"],
+ "optionalPlugins": ["share", "kibanaLegacy"],
"configPath": ["xpack", "discoverEnhanced"],
"requiredBundles": ["kibanaUtils", "data"]
}
diff --git a/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts b/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts
index 59359fb35f544..3aec0ce238c3c 100644
--- a/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts
+++ b/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts
@@ -9,6 +9,7 @@ import { DiscoverStart } from '../../../../../../src/plugins/discover/public';
import { EmbeddableStart } from '../../../../../../src/plugins/embeddable/public';
import { ViewMode, IEmbeddable } from '../../../../../../src/plugins/embeddable/public';
import { StartServicesGetter } from '../../../../../../src/plugins/kibana_utils/public';
+import { KibanaLegacyStart } from '../../../../../../src/plugins/kibana_legacy/public';
import { CoreStart } from '../../../../../../src/core/public';
import { KibanaURL } from './kibana_url';
import * as shared from './shared';
@@ -18,6 +19,11 @@ export const ACTION_EXPLORE_DATA = 'ACTION_EXPLORE_DATA';
export interface PluginDeps {
discover: Pick<DiscoverStart, 'urlGenerator'>;
embeddable: Pick<EmbeddableStart, 'filtersAndTimeRangeFromContext'>;
+ kibanaLegacy?: {
+ dashboardConfig: {
+ getHideWriteControls: KibanaLegacyStart['dashboardConfig']['getHideWriteControls'];
+ };
+ };
}
export interface CoreDeps {
@@ -42,6 +48,12 @@ export abstract class AbstractExploreDataAction<Context extends { embeddable?: IEmbeddable }> {
if (!embeddable) return false;
+
+ const isDashboardOnlyMode = !!this.params
+ .start()
+ .plugins.kibanaLegacy?.dashboardConfig.getHideWriteControls();
+ if (isDashboardOnlyMode) return false;
+
if (!this.params.start().plugins.discover.urlGenerator) return false;
if (!shared.hasExactlyOneIndexPattern(embeddable)) return false;
if (embeddable.getInput().viewMode !== ViewMode.VIEW) return false;
diff --git a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts
index 0d22f0a36d418..6c3ed7a2fe778 100644
--- a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts
+++ b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts
@@ -34,7 +34,10 @@ afterEach(() => {
i18nTranslateSpy.mockClear();
});
-const setup = ({ useRangeEvent = false }: { useRangeEvent?: boolean } = {}) => {
+const setup = ({
+ useRangeEvent = false,
+ dashboardOnlyMode = false,
+}: { useRangeEvent?: boolean; dashboardOnlyMode?: boolean } = {}) => {
type UrlGenerator = UrlGeneratorContract<'DISCOVER_APP_URL_GENERATOR'>;
const core = coreMock.createStart();
@@ -54,6 +57,11 @@ const setup = ({ useRangeEvent = false }: { useRangeEvent?: boolean } = {}) => {
embeddable: {
filtersAndTimeRangeFromContext,
},
+ kibanaLegacy: {
+ dashboardConfig: {
+ getHideWriteControls: () => dashboardOnlyMode,
+ },
+ },
};
const params: Params = {
@@ -181,6 +189,13 @@ describe('"Explore underlying data" panel action', () => {
expect(isCompatible).toBe(false);
});
+
+ test('return false for dashboard_only mode', async () => {
+ const { action, context } = setup({ dashboardOnlyMode: true });
+ const isCompatible = await action.isCompatible(context);
+
+ expect(isCompatible).toBe(false);
+ });
});
describe('getHref()', () => {
diff --git a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts
index c362e554e96c0..1422cc871cde8 100644
--- a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts
+++ b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts
@@ -28,7 +28,7 @@ afterEach(() => {
i18nTranslateSpy.mockClear();
});
-const setup = () => {
+const setup = ({ dashboardOnlyMode = false }: { dashboardOnlyMode?: boolean } = {}) => {
type UrlGenerator = UrlGeneratorContract<'DISCOVER_APP_URL_GENERATOR'>;
const core = coreMock.createStart();
@@ -48,6 +48,11 @@ const setup = () => {
embeddable: {
filtersAndTimeRangeFromContext,
},
+ kibanaLegacy: {
+ dashboardConfig: {
+ getHideWriteControls: () => dashboardOnlyMode,
+ },
+ },
};
const params: Params = {
@@ -167,6 +172,13 @@ describe('"Explore underlying data" panel action', () => {
expect(isCompatible).toBe(false);
});
+
+ test('return false for dashboard_only mode', async () => {
+ const { action, context } = setup({ dashboardOnlyMode: true });
+ const isCompatible = await action.isCompatible(context);
+
+ expect(isCompatible).toBe(false);
+ });
});
describe('getHref()', () => {
diff --git a/x-pack/plugins/discover_enhanced/public/plugin.ts b/x-pack/plugins/discover_enhanced/public/plugin.ts
index 9613a9a8e3c8c..4b018354aa092 100644
--- a/x-pack/plugins/discover_enhanced/public/plugin.ts
+++ b/x-pack/plugins/discover_enhanced/public/plugin.ts
@@ -15,6 +15,7 @@ import {
import { createStartServicesGetter } from '../../../../src/plugins/kibana_utils/public';
import { DiscoverSetup, DiscoverStart } from '../../../../src/plugins/discover/public';
import { SharePluginSetup, SharePluginStart } from '../../../../src/plugins/share/public';
+import { KibanaLegacySetup, KibanaLegacyStart } from '../../../../src/plugins/kibana_legacy/public';
import {
EmbeddableSetup,
EmbeddableStart,
@@ -39,6 +40,7 @@ declare module '../../../../src/plugins/ui_actions/public' {
export interface DiscoverEnhancedSetupDependencies {
discover: DiscoverSetup;
embeddable: EmbeddableSetup;
+ kibanaLegacy?: KibanaLegacySetup;
share?: SharePluginSetup;
uiActions: UiActionsSetup;
}
@@ -46,6 +48,7 @@ export interface DiscoverEnhancedSetupDependencies {
export interface DiscoverEnhancedStartDependencies {
discover: DiscoverStart;
embeddable: EmbeddableStart;
+ kibanaLegacy?: KibanaLegacyStart;
share?: SharePluginStart;
uiActions: UiActionsStart;
}
From 5f6b9353e734e6da39fbbe34548700ae4025eed0 Mon Sep 17 00:00:00 2001
From: Kevin Logan <56395104+kevinlog@users.noreply.github.com>
Date: Thu, 23 Jul 2020 07:38:27 -0400
Subject: [PATCH 02/59] [SECURITY_SOLUTION] update Elastic Endpoint text in
rules (#72613)
---
x-pack/plugins/lists/common/constants.ts | 4 +-
...collection_cloudtrail_logging_created.json | 1 +
...l_access_attempted_bypass_of_okta_mfa.json | 1 +
...ccess_aws_iam_assume_role_brute_force.json | 49 ++++++++++++++++++
...ial_access_iam_user_addition_to_group.json | 1 +
...okta_brute_force_or_password_spraying.json | 51 +++++++++++++++++++
..._access_secretsmanager_getsecretvalue.json | 1 +
...se_evasion_cloudtrail_logging_deleted.json | 1 +
..._evasion_cloudtrail_logging_suspended.json | 1 +
...nse_evasion_cloudwatch_alarm_deletion.json | 1 +
..._evasion_config_service_rule_deletion.json | 1 +
...vasion_configuration_recorder_stopped.json | 1 +
...defense_evasion_ec2_flow_log_deletion.json | 1 +
...ense_evasion_ec2_network_acl_deletion.json | 1 +
...e_evasion_guardduty_detector_deletion.json | 1 +
...sion_s3_bucket_configuration_deletion.json | 1 +
.../defense_evasion_waf_acl_deletion.json | 1 +
...asion_waf_rule_or_rule_group_deletion.json | 1 +
.../prepackaged_rules/elastic_endpoint.json | 4 +-
.../endpoint_adversary_behavior_detected.json | 4 +-
.../endpoint_cred_dumping_detected.json | 4 +-
.../endpoint_cred_dumping_prevented.json | 4 +-
.../endpoint_cred_manipulation_detected.json | 4 +-
.../endpoint_cred_manipulation_prevented.json | 4 +-
.../endpoint_exploit_detected.json | 4 +-
.../endpoint_exploit_prevented.json | 4 +-
.../endpoint_malware_detected.json | 4 +-
.../endpoint_malware_prevented.json | 4 +-
.../endpoint_permission_theft_detected.json | 4 +-
.../endpoint_permission_theft_prevented.json | 4 +-
.../endpoint_process_injection_detected.json | 4 +-
.../endpoint_process_injection_prevented.json | 4 +-
.../endpoint_ransomware_detected.json | 4 +-
.../endpoint_ransomware_prevented.json | 4 +-
.../execution_via_system_manager.json | 1 +
...ltration_ec2_snapshot_change_activity.json | 1 +
.../prepackaged_rules/external_alerts.json | 11 +++-
...pact_attempt_to_revoke_okta_api_token.json | 1 +
.../impact_cloudtrail_logging_updated.json | 1 +
.../impact_cloudwatch_log_group_deletion.json | 1 +
...impact_cloudwatch_log_stream_deletion.json | 1 +
.../impact_ec2_disable_ebs_encryption.json | 1 +
.../impact_iam_deactivate_mfa_device.json | 1 +
.../impact_iam_group_deletion.json | 1 +
.../impact_possible_okta_dos_attack.json | 1 +
.../impact_rds_cluster_deletion.json | 1 +
.../impact_rds_instance_cluster_stoppage.json | 1 +
.../rules/prepackaged_rules/index.ts | 4 ++
.../initial_access_console_login_root.json | 1 +
.../initial_access_password_recovery.json | 1 +
...icious_activity_reported_by_okta_user.json | 1 +
...a_attempt_to_deactivate_okta_mfa_rule.json | 1 +
.../okta_attempt_to_delete_okta_policy.json | 1 +
.../okta_attempt_to_modify_okta_mfa_rule.json | 1 +
...a_attempt_to_modify_okta_network_zone.json | 1 +
.../okta_attempt_to_modify_okta_policy.json | 1 +
..._or_delete_application_sign_on_policy.json | 1 +
...threat_detected_by_okta_threatinsight.json | 1 +
...tor_privileges_assigned_to_okta_group.json | 1 +
...ence_attempt_to_create_okta_api_token.json | 1 +
..._deactivate_mfa_for_okta_user_account.json | 1 +
...nce_attempt_to_deactivate_okta_policy.json | 1 +
...set_mfa_factors_for_okta_user_account.json | 1 +
.../persistence_ec2_network_acl_creation.json | 1 +
.../persistence_iam_group_creation.json | 1 +
.../persistence_rds_cluster_creation.json | 1 +
...ege_escalation_root_login_without_mfa.json | 1 +
...ege_escalation_updateassumerolepolicy.json | 1 +
68 files changed, 195 insertions(+), 35 deletions(-)
create mode 100644 x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_aws_iam_assume_role_brute_force.json
create mode 100644 x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_okta_brute_force_or_password_spraying.json
diff --git a/x-pack/plugins/lists/common/constants.ts b/x-pack/plugins/lists/common/constants.ts
index 7bb83cddd4331..df16085b53405 100644
--- a/x-pack/plugins/lists/common/constants.ts
+++ b/x-pack/plugins/lists/common/constants.ts
@@ -44,7 +44,7 @@ export const ENDPOINT_LIST_ITEM_URL = '/api/endpoint_list/items';
export const ENDPOINT_LIST_ID = 'endpoint_list';
/** The name of the single global space agnostic endpoint list */
-export const ENDPOINT_LIST_NAME = 'Elastic Endpoint Exception List';
+export const ENDPOINT_LIST_NAME = 'Elastic Endpoint Security Exception List';
/** The description of the single global space agnostic endpoint list */
-export const ENDPOINT_LIST_DESCRIPTION = 'Elastic Endpoint Exception List';
+export const ENDPOINT_LIST_DESCRIPTION = 'Elastic Endpoint Security Exception List';
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/collection_cloudtrail_logging_created.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/collection_cloudtrail_logging_created.json
index 4437612a5056b..ee39661ee9b10 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/collection_cloudtrail_logging_created.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/collection_cloudtrail_logging_created.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS CloudTrail Log Created",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:CreateTrail and event.dataset:aws.cloudtrail and event.provider:cloudtrail.amazonaws.com and event.outcome:success",
"references": [
"https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_CreateTrail.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_attempted_bypass_of_okta_mfa.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_attempted_bypass_of_okta_mfa.json
index e3e4b7b54c3b2..eb8523b797ddf 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_attempted_bypass_of_okta_mfa.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_attempted_bypass_of_okta_mfa.json
@@ -9,6 +9,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempted Bypass of Okta MFA",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:user.mfa.attempt_bypass",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_aws_iam_assume_role_brute_force.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_aws_iam_assume_role_brute_force.json
new file mode 100644
index 0000000000000..ddc9e91782136
--- /dev/null
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_aws_iam_assume_role_brute_force.json
@@ -0,0 +1,49 @@
+{
+ "author": [
+ "Elastic"
+ ],
+ "description": "Identifies a high number of failed attempts to assume an AWS Identity and Access Management (IAM) role. IAM roles are used to delegate access to users or services. An adversary may attempt to enumerate IAM roles in order to determine if a role exists before attempting to assume or hijack the discovered role.",
+ "from": "now-20m",
+ "index": [
+ "filebeat-*"
+ ],
+ "language": "kuery",
+ "license": "Elastic License",
+ "name": "AWS IAM Brute Force of Assume Role Policy",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
+ "query": "event.module:aws and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.action:UpdateAssumeRolePolicy and aws.cloudtrail.error_code:MalformedPolicyDocumentException and event.outcome:failure",
+ "references": [
+ "https://www.praetorian.com/blog/aws-iam-assume-role-vulnerabilities",
+ "https://rhinosecuritylabs.com/aws/assume-worst-aws-assume-role-enumeration/"
+ ],
+ "risk_score": 47,
+ "rule_id": "ea248a02-bc47-4043-8e94-2885b19b2636",
+ "severity": "medium",
+ "tags": [
+ "AWS",
+ "Elastic"
+ ],
+ "threat": [
+ {
+ "framework": "MITRE ATT&CK",
+ "tactic": {
+ "id": "TA0006",
+ "name": "Credential Access",
+ "reference": "https://attack.mitre.org/tactics/TA0006/"
+ },
+ "technique": [
+ {
+ "id": "T1110",
+ "name": "Brute Force",
+ "reference": "https://attack.mitre.org/techniques/T1110/"
+ }
+ ]
+ }
+ ],
+ "threshold": {
+ "field": "",
+ "value": 25
+ },
+ "type": "threshold",
+ "version": 1
+}
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_iam_user_addition_to_group.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_iam_user_addition_to_group.json
index 1e268d2f6bf06..ecbf268550b6c 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_iam_user_addition_to_group.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_iam_user_addition_to_group.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS IAM User Addition to Group",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:AddUserToGroup and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.outcome:success",
"references": [
"https://docs.aws.amazon.com/IAM/latest/APIReference/API_AddUserToGroup.html"
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_okta_brute_force_or_password_spraying.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_okta_brute_force_or_password_spraying.json
new file mode 100644
index 0000000000000..87f20525203f6
--- /dev/null
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_okta_brute_force_or_password_spraying.json
@@ -0,0 +1,51 @@
+{
+ "author": [
+ "Elastic"
+ ],
+ "description": "Identifies a high number of failed Okta user authentication attempts from a single IP address, which could be indicative of a brute force or password spraying attack. An adversary may attempt a brute force or password spraying attack to obtain unauthorized access to user accounts.",
+ "false_positives": [
+ "Automated processes that attempt to authenticate using expired credentials and unbounded retries may lead to false positives."
+ ],
+ "index": [
+ "filebeat-*"
+ ],
+ "language": "kuery",
+ "license": "Elastic License",
+ "name": "Okta Brute Force or Password Spraying Attack",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
+ "query": "event.module:okta and event.dataset:okta.system and event.category:authentication and event.outcome:failure",
+ "references": [
+ "https://developer.okta.com/docs/reference/api/system-log/",
+ "https://developer.okta.com/docs/reference/api/event-types/"
+ ],
+ "risk_score": 47,
+ "rule_id": "42bf698b-4738-445b-8231-c834ddefd8a0",
+ "severity": "medium",
+ "tags": [
+ "Elastic",
+ "Okta"
+ ],
+ "threat": [
+ {
+ "framework": "MITRE ATT&CK",
+ "tactic": {
+ "id": "TA0006",
+ "name": "Credential Access",
+ "reference": "https://attack.mitre.org/tactics/TA0006/"
+ },
+ "technique": [
+ {
+ "id": "T1110",
+ "name": "Brute Force",
+ "reference": "https://attack.mitre.org/techniques/T1110/"
+ }
+ ]
+ }
+ ],
+ "threshold": {
+ "field": "source.ip",
+ "value": 25
+ },
+ "type": "threshold",
+ "version": 1
+}
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_secretsmanager_getsecretvalue.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_secretsmanager_getsecretvalue.json
index 740805f71a3cd..f570b7fb3e946 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_secretsmanager_getsecretvalue.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_secretsmanager_getsecretvalue.json
@@ -15,6 +15,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS Access Secret in Secrets Manager",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.dataset:aws.cloudtrail and event.provider:secretsmanager.amazonaws.com and event.action:GetSecretValue",
"references": [
"https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_deleted.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_deleted.json
index 2a74b8fecd809..78f4c9e853f64 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_deleted.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_deleted.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS CloudTrail Log Deleted",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:DeleteTrail and event.dataset:aws.cloudtrail and event.provider:cloudtrail.amazonaws.com and event.outcome:success",
"references": [
"https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_DeleteTrail.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_suspended.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_suspended.json
index 5d6c1a93bab1d..f412ad9b2e2fd 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_suspended.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_suspended.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS CloudTrail Log Suspended",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:StopLogging and event.dataset:aws.cloudtrail and event.provider:cloudtrail.amazonaws.com and event.outcome:success",
"references": [
"https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_StopLogging.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudwatch_alarm_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudwatch_alarm_deletion.json
index 9ac45ba872809..b76ea0944f855 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudwatch_alarm_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudwatch_alarm_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS CloudWatch Alarm Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:DeleteAlarms and event.dataset:aws.cloudtrail and event.provider:monitoring.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/cloudwatch/delete-alarms.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_config_service_rule_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_config_service_rule_deletion.json
index 9ef37bd4e44e1..353067e6db833 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_config_service_rule_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_config_service_rule_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS Config Service Tampering",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.dataset: aws.cloudtrail and event.action: DeleteConfigRule and event.provider: config.amazonaws.com",
"references": [
"https://docs.aws.amazon.com/config/latest/developerguide/how-does-config-work.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_configuration_recorder_stopped.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_configuration_recorder_stopped.json
index 0aed7aa5ad0ca..b70aa5cd11b52 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_configuration_recorder_stopped.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_configuration_recorder_stopped.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS Configuration Recorder Stopped",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:StopConfigurationRecorder and event.dataset:aws.cloudtrail and event.provider:config.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/configservice/stop-configuration-recorder.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_flow_log_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_flow_log_deletion.json
index b1f6c42f6f61a..a1b0ec0f01d2a 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_flow_log_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_flow_log_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS EC2 Flow Log Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:DeleteFlowLogs and event.dataset:aws.cloudtrail and event.provider:ec2.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/delete-flow-logs.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_network_acl_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_network_acl_deletion.json
index 7dc4e33afcd36..21ce4e498ccaf 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_network_acl_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_network_acl_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS EC2 Network Access Control List Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:(DeleteNetworkAcl or DeleteNetworkAclEntry) and event.dataset:aws.cloudtrail and event.provider:ec2.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/delete-network-acl.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_guardduty_detector_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_guardduty_detector_deletion.json
index c456396c85cd8..989eff90aaf02 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_guardduty_detector_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_guardduty_detector_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS GuardDuty Detector Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:DeleteDetector and event.dataset:aws.cloudtrail and event.provider:guardduty.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/guardduty/delete-detector.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_s3_bucket_configuration_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_s3_bucket_configuration_deletion.json
index 77f9e0f4a313c..b1e8d0cd0d3e1 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_s3_bucket_configuration_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_s3_bucket_configuration_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS S3 Bucket Configuration Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:(DeleteBucketPolicy or DeleteBucketReplication or DeleteBucketCors or DeleteBucketEncryption or DeleteBucketLifecycle) and event.dataset:aws.cloudtrail and event.provider:s3.amazonaws.com and event.outcome:success",
"references": [
"https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucketPolicy.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_acl_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_acl_deletion.json
index 708f931a5f8ab..b2092dc78b012 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_acl_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_acl_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS WAF Access Control List Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:DeleteWebACL and event.dataset:aws.cloudtrail and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/waf-regional/delete-web-acl.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_rule_or_rule_group_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_rule_or_rule_group_deletion.json
index 37dae51ec3125..ccec76b7f7974 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_rule_or_rule_group_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_rule_or_rule_group_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS WAF Rule or Rule Group Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.module:aws and event.dataset:aws.cloudtrail and event.action:(DeleteRule or DeleteRuleGroup) and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/waf/delete-rule-group.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/elastic_endpoint.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/elastic_endpoint.json
index 396803086552e..e6a517d85db81 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/elastic_endpoint.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/elastic_endpoint.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Generates a detection alert each time an Elastic Endpoint alert is received. Enabling this rule allows you to immediately begin investigating your Elastic Endpoint alerts.",
+ "description": "Generates a detection alert each time an Elastic Endpoint Security alert is received. Enabling this rule allows you to immediately begin investigating your Elastic Endpoint alerts.",
"enabled": true,
"exceptions_list": [
{
@@ -18,7 +18,7 @@
"language": "kuery",
"license": "Elastic License",
"max_signals": 10000,
- "name": "Elastic Endpoint",
+ "name": "Elastic Endpoint Security",
"query": "event.kind:alert and event.module:(endpoint and not endgame)",
"risk_score": 47,
"risk_score_mapping": [
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_adversary_behavior_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_adversary_behavior_detected.json
index 5075630e24f29..16584a03a3c91 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_adversary_behavior_detected.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_adversary_behavior_detected.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint detected an Adversary Behavior. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security detected an Adversary Behavior. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Adversary Behavior - Detected - Elastic Endpoint",
+ "name": "Adversary Behavior - Detected - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and (event.action:rules_engine_event or endgame.event_subtype_full:rules_engine_event)",
"risk_score": 47,
"rule_id": "77a3c3df-8ec4-4da4-b758-878f551dee69",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_detected.json
index 4bf9ba8ec36e1..5717c490114b9 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_detected.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_detected.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint detected Credential Dumping. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security detected Credential Dumping. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Credential Dumping - Detected - Elastic Endpoint",
+ "name": "Credential Dumping - Detected - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:cred_theft_event or endgame.event_subtype_full:cred_theft_event)",
"risk_score": 73,
"rule_id": "571afc56-5ed9-465d-a2a9-045f099f6e7e",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_prevented.json
index bed473b12b046..5c1b2cb02b841 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_prevented.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_prevented.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint prevented Credential Dumping. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security prevented Credential Dumping. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Credential Dumping - Prevented - Elastic Endpoint",
+ "name": "Credential Dumping - Prevented - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:cred_theft_event or endgame.event_subtype_full:cred_theft_event)",
"risk_score": 47,
"rule_id": "db8c33a8-03cd-4988-9e2c-d0a4863adb13",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_detected.json
index 02ba20bb59aec..16ad12a94ec40 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_detected.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_detected.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint detected Credential Manipulation. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security detected Credential Manipulation. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Credential Manipulation - Detected - Elastic Endpoint",
+ "name": "Credential Manipulation - Detected - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:token_manipulation_event or endgame.event_subtype_full:token_manipulation_event)",
"risk_score": 73,
"rule_id": "c0be5f31-e180-48ed-aa08-96b36899d48f",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_prevented.json
index 128f8d5639d5d..9addcbf2fba30 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_prevented.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_prevented.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint prevented Credential Manipulation. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security prevented Credential Manipulation. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Credential Manipulation - Prevented - Elastic Endpoint",
+ "name": "Credential Manipulation - Prevented - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:token_manipulation_event or endgame.event_subtype_full:token_manipulation_event)",
"risk_score": 47,
"rule_id": "c9e38e64-3f4c-4bf3-ad48-0e61a60ea1fa",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_detected.json
index a11b839792b79..f51a38781c953 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_detected.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_detected.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint detected an Exploit. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security detected an Exploit. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Exploit - Detected - Elastic Endpoint",
+ "name": "Exploit - Detected - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:exploit_event or endgame.event_subtype_full:exploit_event)",
"risk_score": 73,
"rule_id": "2003cdc8-8d83-4aa5-b132-1f9a8eb48514",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_prevented.json
index 2deb7bce3b203..8b96c5a63fbef 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_prevented.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_prevented.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint prevented an Exploit. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security prevented an Exploit. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Exploit - Prevented - Elastic Endpoint",
+ "name": "Exploit - Prevented - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:exploit_event or endgame.event_subtype_full:exploit_event)",
"risk_score": 47,
"rule_id": "2863ffeb-bf77-44dd-b7a5-93ef94b72036",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_detected.json
index d1389b21f2d7e..28ff73468deb4 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_detected.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_detected.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint detected Malware. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security detected Malware. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Malware - Detected - Elastic Endpoint",
+ "name": "Malware - Detected - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:file_classification_event or endgame.event_subtype_full:file_classification_event)",
"risk_score": 99,
"rule_id": "0a97b20f-4144-49ea-be32-b540ecc445de",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_prevented.json
index b83bc259175c6..3d32abf2bf8f2 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_prevented.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_prevented.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint prevented Malware. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security prevented Malware. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Malware - Prevented - Elastic Endpoint",
+ "name": "Malware - Prevented - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:file_classification_event or endgame.event_subtype_full:file_classification_event)",
"risk_score": 73,
"rule_id": "3b382770-efbb-44f4-beed-f5e0a051b895",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_detected.json
index b81b9c67644c6..a89a7f7d5918c 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_detected.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_detected.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint detected Permission Theft. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security detected Permission Theft. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Permission Theft - Detected - Elastic Endpoint",
+ "name": "Permission Theft - Detected - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:token_protection_event or endgame.event_subtype_full:token_protection_event)",
"risk_score": 73,
"rule_id": "c3167e1b-f73c-41be-b60b-87f4df707fe3",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_prevented.json
index b69598cffc230..fb9dbe3dadb17 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_prevented.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_prevented.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint prevented Permission Theft. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security prevented Permission Theft. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Permission Theft - Prevented - Elastic Endpoint",
+ "name": "Permission Theft - Prevented - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:token_protection_event or endgame.event_subtype_full:token_protection_event)",
"risk_score": 47,
"rule_id": "453f659e-0429-40b1-bfdb-b6957286e04b",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_detected.json
index 8299e11392398..e022d058d7560 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_detected.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_detected.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint detected Process Injection. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security detected Process Injection. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Process Injection - Detected - Elastic Endpoint",
+ "name": "Process Injection - Detected - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:kernel_shellcode_event or endgame.event_subtype_full:kernel_shellcode_event)",
"risk_score": 73,
"rule_id": "80c52164-c82a-402c-9964-852533d58be1",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_prevented.json
index 237558ae372a8..2d189707293f1 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_prevented.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_prevented.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint prevented Process Injection. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security prevented Process Injection. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Process Injection - Prevented - Elastic Endpoint",
+ "name": "Process Injection - Prevented - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:kernel_shellcode_event or endgame.event_subtype_full:kernel_shellcode_event)",
"risk_score": 47,
"rule_id": "990838aa-a953-4f3e-b3cb-6ddf7584de9e",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_detected.json
index 4ead850c60e8f..077c20bca5d8e 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_detected.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_detected.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint detected Ransomware. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security detected Ransomware. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Ransomware - Detected - Elastic Endpoint",
+ "name": "Ransomware - Detected - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:ransomware_event or endgame.event_subtype_full:ransomware_event)",
"risk_score": 99,
"rule_id": "8cb4f625-7743-4dfb-ae1b-ad92be9df7bd",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_prevented.json
index 25d167afa204c..b615fcb04895e 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_prevented.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_prevented.json
@@ -2,7 +2,7 @@
"author": [
"Elastic"
],
- "description": "Elastic Endpoint prevented Ransomware. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.",
+ "description": "Elastic Endpoint Security prevented Ransomware. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.",
"from": "now-15m",
"index": [
"endgame-*"
@@ -10,7 +10,7 @@
"interval": "10m",
"language": "kuery",
"license": "Elastic License",
- "name": "Ransomware - Prevented - Elastic Endpoint",
+ "name": "Ransomware - Prevented - Elastic Endpoint Security",
"query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:ransomware_event or endgame.event_subtype_full:ransomware_event)",
"risk_score": 73,
"rule_id": "e3c5d5cb-41d5-4206-805c-f30561eae3ac",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/execution_via_system_manager.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/execution_via_system_manager.json
index 90338f4460725..a9f8ee1af8bf6 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/execution_via_system_manager.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/execution_via_system_manager.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS Execution via System Manager",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.module:aws and event.dataset:aws.cloudtrail and event.provider:ssm.amazonaws.com and event.action:SendCommand and event.outcome:success",
"references": [
"https://docs.aws.amazon.com/systems-manager/latest/userguide/ssm-plugins.html"
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/exfiltration_ec2_snapshot_change_activity.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/exfiltration_ec2_snapshot_change_activity.json
index 04cc697cf36f9..25711afbb4c66 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/exfiltration_ec2_snapshot_change_activity.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/exfiltration_ec2_snapshot_change_activity.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS EC2 Snapshot Activity",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.module:aws and event.dataset:aws.cloudtrail and event.provider:ec2.amazonaws.com and event.action:ModifySnapshotAttribute",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/modify-snapshot-attribute.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/external_alerts.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/external_alerts.json
index c8ebb2ed0e5d7..678ad9eb03b50 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/external_alerts.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/external_alerts.json
@@ -2,7 +2,16 @@
"author": [
"Elastic"
],
- "description": "Generates a detection alert for each external alert written to the configured securitySolution:defaultIndex. Enabling this rule allows you to immediately begin investigating external alerts in the app.",
+ "description": "Generates a detection alert for each external alert written to the configured indices. Enabling this rule allows you to immediately begin investigating external alerts in the app.",
+ "index": [
+ "apm-*-transaction*",
+ "auditbeat-*",
+ "endgame-*",
+ "filebeat-*",
+ "logs-*",
+ "packetbeat-*",
+ "winlogbeat-*"
+ ],
"language": "kuery",
"license": "Elastic License",
"max_signals": 10000,
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_attempt_to_revoke_okta_api_token.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_attempt_to_revoke_okta_api_token.json
index 0f4ded9fcfe87..27e50313c8f82 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_attempt_to_revoke_okta_api_token.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_attempt_to_revoke_okta_api_token.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempt to Revoke Okta API Token",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:system.api_token.revoke",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudtrail_logging_updated.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudtrail_logging_updated.json
index d969ef21027f0..0bafa56c9af49 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudtrail_logging_updated.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudtrail_logging_updated.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS CloudTrail Log Updated",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:UpdateTrail and event.dataset:aws.cloudtrail and event.provider:cloudtrail.amazonaws.com and event.outcome:success",
"references": [
"https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_UpdateTrail.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_group_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_group_deletion.json
index d33593d4a44b2..74b5e0d93c441 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_group_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_group_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS CloudWatch Log Group Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:DeleteLogGroup and event.dataset:aws.cloudtrail and event.provider:logs.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/logs/delete-log-group.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_stream_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_stream_deletion.json
index a1108dd07abdd..59c659117c098 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_stream_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_stream_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS CloudWatch Log Stream Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:DeleteLogStream and event.dataset:aws.cloudtrail and event.provider:logs.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/logs/delete-log-stream.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_ec2_disable_ebs_encryption.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_ec2_disable_ebs_encryption.json
index 4681b475d92e7..10a1989ad6423 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_ec2_disable_ebs_encryption.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_ec2_disable_ebs_encryption.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS EC2 Encryption Disabled",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:DisableEbsEncryptionByDefault and event.dataset:aws.cloudtrail and event.provider:ec2.amazonaws.com and event.outcome:success",
"references": [
"https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_deactivate_mfa_device.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_deactivate_mfa_device.json
index f873e3483a34f..4aa0b355171fe 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_deactivate_mfa_device.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_deactivate_mfa_device.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS IAM Deactivation of MFA Device",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:DeactivateMFADevice and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/iam/deactivate-mfa-device.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_group_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_group_deletion.json
index 23364c8b3aa28..25b300d33cce1 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_group_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_group_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS IAM Group Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:DeleteGroup and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/iam/delete-group.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_possible_okta_dos_attack.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_possible_okta_dos_attack.json
index 8c76f182442a5..9ca8b7ed21acb 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_possible_okta_dos_attack.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_possible_okta_dos_attack.json
@@ -9,6 +9,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Possible Okta DoS Attack",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:(application.integration.rate_limit_exceeded or system.org.rate_limit.warning or system.org.rate_limit.violation or core.concurrency.org.limit.violation)",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_cluster_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_cluster_deletion.json
index 88ec942b0e5e5..e8343f1b7b7c6 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_cluster_deletion.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_cluster_deletion.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS RDS Cluster Deletion",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:(DeleteDBCluster or DeleteGlobalCluster) and event.dataset:aws.cloudtrail and event.provider:rds.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/delete-db-cluster.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_instance_cluster_stoppage.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_instance_cluster_stoppage.json
index 2c25781e24d19..8c4387e60d281 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_instance_cluster_stoppage.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_instance_cluster_stoppage.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS RDS Instance/Cluster Stoppage",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:(StopDBCluster or StopDBInstance) and event.dataset:aws.cloudtrail and event.provider:rds.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/stop-db-cluster.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/index.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/index.ts
index f2e2137eec41b..685c869630ca3 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/index.ts
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/index.ts
@@ -210,6 +210,8 @@ import rule198 from './ml_cloudtrail_rare_error_code.json';
import rule199 from './ml_cloudtrail_rare_method_by_city.json';
import rule200 from './ml_cloudtrail_rare_method_by_country.json';
import rule201 from './ml_cloudtrail_rare_method_by_user.json';
+import rule202 from './credential_access_aws_iam_assume_role_brute_force.json';
+import rule203 from './credential_access_okta_brute_force_or_password_spraying.json';
export const rawRules = [
rule1,
@@ -413,4 +415,6 @@ export const rawRules = [
rule199,
rule200,
rule201,
+ rule202,
+ rule203,
];
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_console_login_root.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_console_login_root.json
index 0f761f0d2a5f5..829d87c1964c9 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_console_login_root.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_console_login_root.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS Management Console Root Login",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:ConsoleLogin and event.module:aws and event.dataset:aws.cloudtrail and event.provider:signin.amazonaws.com and aws.cloudtrail.user_identity.type:Root and event.outcome:success",
"references": [
"https://docs.aws.amazon.com/IAM/latest/UserGuide/id_root-user.html"
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_password_recovery.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_password_recovery.json
index 1042ce19a14c7..7429c69fc3174 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_password_recovery.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_password_recovery.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS IAM Password Recovery Requested",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:PasswordRecoveryRequested and event.provider:signin.amazonaws.com and event.outcome:success",
"references": [
"https://www.cadosecurity.com/2020/06/11/an-ongoing-aws-phishing-campaign/"
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_suspicious_activity_reported_by_okta_user.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_suspicious_activity_reported_by_okta_user.json
index 5fa8a655c08bf..25bf7dd287d05 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_suspicious_activity_reported_by_okta_user.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_suspicious_activity_reported_by_okta_user.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Suspicious Activity Reported by Okta User",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:user.account.report_suspicious_activity_by_enduser",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_deactivate_okta_mfa_rule.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_deactivate_okta_mfa_rule.json
index 737044d5a9bdc..1d15db83bb18e 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_deactivate_okta_mfa_rule.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_deactivate_okta_mfa_rule.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempt to Deactivate Okta MFA Rule",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:policy.rule.deactivate",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_delete_okta_policy.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_delete_okta_policy.json
index ea8ba7223095f..6df2ed6cb34a4 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_delete_okta_policy.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_delete_okta_policy.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempt to Delete Okta Policy",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:policy.lifecycle.delete",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_mfa_rule.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_mfa_rule.json
index dfe16f56da0e2..e276166f6130b 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_mfa_rule.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_mfa_rule.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempt to Modify Okta MFA Rule",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:(policy.rule.update or policy.rule.delete)",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_network_zone.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_network_zone.json
index 61c45f8e7d85e..bdfe7d25092ba 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_network_zone.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_network_zone.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempt to Modify Okta Network Zone",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:(zone.update or zone.deactivate or zone.delete or network_zone.rule.disabled or zone.remove_blacklist)",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_policy.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_policy.json
index a864b900a5998..e3e0d5fef7b2f 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_policy.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_policy.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempt to Modify Okta Policy",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:policy.lifecycle.update",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_or_delete_application_sign_on_policy.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_or_delete_application_sign_on_policy.json
index ff7546ac2f1a6..ad21ebe065f8c 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_or_delete_application_sign_on_policy.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_or_delete_application_sign_on_policy.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Modification or Removal of an Okta Application Sign-On Policy",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:(application.policy.sign_on.update or application.policy.sign_on.rule.delete)",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_threat_detected_by_okta_threatinsight.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_threat_detected_by_okta_threatinsight.json
index 7a1b6e3d82d7c..e92cf3d67d313 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_threat_detected_by_okta_threatinsight.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_threat_detected_by_okta_threatinsight.json
@@ -9,6 +9,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Threat Detected by Okta ThreatInsight",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:security.threat.detected",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_administrator_privileges_assigned_to_okta_group.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_administrator_privileges_assigned_to_okta_group.json
index 70e7eb1706e1b..d5f3995fb8bcc 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_administrator_privileges_assigned_to_okta_group.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_administrator_privileges_assigned_to_okta_group.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Administrator Privileges Assigned to Okta Group",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:group.privilege.grant",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_create_okta_api_token.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_create_okta_api_token.json
index 453580d580344..5f6c006c5d177 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_create_okta_api_token.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_create_okta_api_token.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempt to Create Okta API Token",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:system.api_token.create",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_mfa_for_okta_user_account.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_mfa_for_okta_user_account.json
index e5648285c5289..d3a66ef8d9c77 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_mfa_for_okta_user_account.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_mfa_for_okta_user_account.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempt to Deactivate MFA for Okta User Account",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:user.mfa.factor.deactivate",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_okta_policy.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_okta_policy.json
index 53da259042738..7104cace1c5d9 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_okta_policy.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_okta_policy.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempt to Deactivate Okta Policy",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:policy.lifecycle.deactivate",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_reset_mfa_factors_for_okta_user_account.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_reset_mfa_factors_for_okta_user_account.json
index f662c0c0b8eb6..c38f71d8e00a6 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_reset_mfa_factors_for_okta_user_account.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_reset_mfa_factors_for_okta_user_account.json
@@ -12,6 +12,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "Attempt to Reset MFA Factors for Okta User Account",
+ "note": "The Okta Filebeat module must be enabled to use this rule.",
"query": "event.module:okta and event.dataset:okta.system and event.action:user.mfa.factor.reset_all",
"references": [
"https://developer.okta.com/docs/reference/api/system-log/",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_ec2_network_acl_creation.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_ec2_network_acl_creation.json
index 911536d2567f4..99bb07fe9660e 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_ec2_network_acl_creation.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_ec2_network_acl_creation.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS EC2 Network Access Control List Creation",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:(CreateNetworkAcl or CreateNetworkAclEntry) and event.dataset:aws.cloudtrail and event.provider:ec2.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/create-network-acl.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_iam_group_creation.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_iam_group_creation.json
index 7c1c4d02737a6..9b2478b97fb38 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_iam_group_creation.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_iam_group_creation.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS IAM Group Creation",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:CreateGroup and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/iam/create-group.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_rds_cluster_creation.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_rds_cluster_creation.json
index c6e23acab0fb5..94a695a97a27a 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_rds_cluster_creation.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_rds_cluster_creation.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS RDS Cluster Creation",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.action:(CreateDBCluster or CreateGlobalCluster) and event.dataset:aws.cloudtrail and event.provider:rds.amazonaws.com and event.outcome:success",
"references": [
"https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/create-db-cluster.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_root_login_without_mfa.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_root_login_without_mfa.json
index 6db9e04edc0cb..74c5376100b2b 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_root_login_without_mfa.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_root_login_without_mfa.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS Root Login Without MFA",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.module:aws and event.dataset:aws.cloudtrail and event.provider:signin.amazonaws.com and event.action:ConsoleLogin and aws.cloudtrail.user_identity.type:Root and aws.cloudtrail.console_login.additional_eventdata.mfa_used:false and event.outcome:success",
"references": [
"https://docs.aws.amazon.com/IAM/latest/UserGuide/id_root-user.html"
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_updateassumerolepolicy.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_updateassumerolepolicy.json
index 623f90716b2b6..7ce54b00f211c 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_updateassumerolepolicy.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_updateassumerolepolicy.json
@@ -14,6 +14,7 @@
"language": "kuery",
"license": "Elastic License",
"name": "AWS IAM Assume Role Policy Update",
+ "note": "The AWS Filebeat module must be enabled to use this rule.",
"query": "event.module:aws and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.action:UpdateAssumeRolePolicy and event.outcome:success",
"references": [
"https://labs.bishopfox.com/tech-blog/5-privesc-attack-vectors-in-aws"
From 9a22b95b97baed7d0fb0ab3cbc3e67a463cd9b55 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cau=C3=AA=20Marcondes?=
<55978943+cauemarcondes@users.noreply.github.com>
Date: Thu, 23 Jul 2020 12:46:09 +0100
Subject: [PATCH 03/59] [APM] Custom link: Removing async check for callAPMApi
(#73004)
* removing async check for callAPMApi
* removing async check for callAPMApi
---
.../CustomizeUI/CustomLink/index.test.tsx | 32 ++++++++-----------
1 file changed, 14 insertions(+), 18 deletions(-)
diff --git a/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx b/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx
index d633d466b6614..56c420878cdba 100644
--- a/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx
+++ b/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx
@@ -35,9 +35,8 @@ const data = [
];
describe('CustomLink', () => {
- let callApmApiSpy: jest.SpyInstance;
beforeAll(() => {
- callApmApiSpy = jest.spyOn(apmApi, 'callApmApi').mockReturnValue({});
+ jest.spyOn(apmApi, 'callApmApi').mockReturnValue({});
});
afterAll(() => {
jest.resetAllMocks();
@@ -103,7 +102,7 @@ describe('CustomLink', () => {
]);
});
- it('checks if create custom link button is available and working', async () => {
+ it('checks if create custom link button is available and working', () => {
const { queryByText, getByText } = render(
@@ -115,7 +114,6 @@ describe('CustomLink', () => {
act(() => {
fireEvent.click(getByText('Create custom link'));
});
- await wait(() => expect(callApmApiSpy).toHaveBeenCalled());
expect(queryByText('Create link')).toBeInTheDocument();
});
});
@@ -133,7 +131,7 @@ describe('CustomLink', () => {
});
});
- const openFlyout = async () => {
+ const openFlyout = () => {
const component = render(
@@ -145,15 +143,12 @@ describe('CustomLink', () => {
act(() => {
fireEvent.click(component.getByText('Create custom link'));
});
- await wait(() =>
- expect(component.queryByText('Create link')).toBeInTheDocument()
- );
- await wait(() => expect(callApmApiSpy).toHaveBeenCalled());
+ expect(component.queryByText('Create link')).toBeInTheDocument();
return component;
};
it('creates a custom link', async () => {
- const component = await openFlyout();
+ const component = openFlyout();
const labelInput = component.getByTestId('label');
act(() => {
fireEvent.change(labelInput, {
@@ -167,7 +162,7 @@ describe('CustomLink', () => {
});
});
await act(async () => {
- await wait(() => fireEvent.submit(component.getByText('Save')));
+ fireEvent.submit(component.getByText('Save'));
});
expect(saveCustomLinkSpy).toHaveBeenCalledTimes(1);
});
@@ -186,11 +181,12 @@ describe('CustomLink', () => {
act(() => {
fireEvent.click(editButtons[0]);
});
- expect(component.queryByText('Create link')).toBeInTheDocument();
+ await wait(() =>
+ expect(component.queryByText('Create link')).toBeInTheDocument()
+ );
await act(async () => {
- await wait(() => fireEvent.click(component.getByText('Delete')));
+ fireEvent.click(component.getByText('Delete'));
});
- expect(callApmApiSpy).toHaveBeenCalled();
expect(refetch).toHaveBeenCalled();
});
@@ -200,8 +196,8 @@ describe('CustomLink', () => {
fireEvent.click(component.getByText('Add another filter'));
}
};
- it('checks if add filter button is disabled after all elements have been added', async () => {
- const component = await openFlyout();
+ it('checks if add filter button is disabled after all elements have been added', () => {
+ const component = openFlyout();
expect(component.getAllByText('service.name').length).toEqual(1);
addFilterField(component, 1);
expect(component.getAllByText('service.name').length).toEqual(2);
@@ -211,8 +207,8 @@ describe('CustomLink', () => {
addFilterField(component, 2);
expect(component.getAllByText('service.name').length).toEqual(4);
});
- it('removes items already selected', async () => {
- const component = await openFlyout();
+ it('removes items already selected', () => {
+ const component = openFlyout();
const addFieldAndCheck = (
fieldName: string,
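
For readers skimming this patch, a minimal sketch of the pattern it moves to: callApmApi is stubbed to return synchronously, so the tests can assert on the flyout right after the click instead of wrapping assertions in wait(). The import path below is an illustrative assumption, not code from the patch; the spy setup mirrors the beforeAll/afterAll shown above.

    import * as apmApi from './createCallApmApi'; // illustrative path, not from the patch

    beforeAll(() => {
      // Synchronous stub: the component never awaits a real request, so the
      // flyout can be asserted on immediately after fireEvent.click(...).
      jest.spyOn(apmApi, 'callApmApi').mockReturnValue({});
    });

    afterAll(() => {
      jest.resetAllMocks();
    });
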
From 06f142d586653db33e1b1b52aa71d95a91b341c3 Mon Sep 17 00:00:00 2001
From: Nicolas Chaulet
Date: Thu, 23 Jul 2020 08:06:16 -0400
Subject: [PATCH 04/59] [Ingest Manager] Fix config rollout: limit concurrent
 config changes instead of configs per second (#72931)
---
.../ingest_manager/common/types/index.ts | 3 +-
x-pack/plugins/ingest_manager/server/index.ts | 3 +-
.../agents/checkin/rxjs_utils.test.ts | 45 ++++++++++++++++++
.../services/agents/checkin/rxjs_utils.ts | 47 +++++++------------
.../server/services/agents/checkin/state.ts | 17 +++++--
.../agents/checkin/state_new_actions.ts | 10 ++--
6 files changed, 80 insertions(+), 45 deletions(-)
create mode 100644 x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts
diff --git a/x-pack/plugins/ingest_manager/common/types/index.ts b/x-pack/plugins/ingest_manager/common/types/index.ts
index d7edc04a35799..7acef263f973a 100644
--- a/x-pack/plugins/ingest_manager/common/types/index.ts
+++ b/x-pack/plugins/ingest_manager/common/types/index.ts
@@ -22,8 +22,7 @@ export interface IngestManagerConfigType {
host?: string;
ca_sha256?: string;
};
- agentConfigRollupRateLimitIntervalMs: number;
- agentConfigRollupRateLimitRequestPerInterval: number;
+ agentConfigRolloutConcurrency: number;
};
}
diff --git a/x-pack/plugins/ingest_manager/server/index.ts b/x-pack/plugins/ingest_manager/server/index.ts
index 6c72218abc531..40e0153a26581 100644
--- a/x-pack/plugins/ingest_manager/server/index.ts
+++ b/x-pack/plugins/ingest_manager/server/index.ts
@@ -35,8 +35,7 @@ export const config = {
host: schema.maybe(schema.string()),
ca_sha256: schema.maybe(schema.string()),
}),
- agentConfigRollupRateLimitIntervalMs: schema.number({ defaultValue: 5000 }),
- agentConfigRollupRateLimitRequestPerInterval: schema.number({ defaultValue: 50 }),
+ agentConfigRolloutConcurrency: schema.number({ defaultValue: 10 }),
}),
}),
};
diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts
new file mode 100644
index 0000000000000..70207dcf325c4
--- /dev/null
+++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts
@@ -0,0 +1,45 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as Rx from 'rxjs';
+import { share } from 'rxjs/operators';
+import { createSubscriberConcurrencyLimiter } from './rxjs_utils';
+
+function createSpyObserver(o: Rx.Observable<any>): [Rx.Subscription, jest.Mock] {
+ const spy = jest.fn();
+ const observer = o.subscribe(spy);
+ return [observer, spy];
+}
+
+describe('createSubscriberConcurrencyLimiter', () => {
+ it('should not publish to more than n concurrent subscriber', async () => {
+ const subject = new Rx.Subject();
+ const sharedObservable = subject.pipe(share());
+
+ const limiter = createSubscriberConcurrencyLimiter(2);
+
+ const [observer1, spy1] = createSpyObserver(sharedObservable.pipe(limiter()));
+ const [observer2, spy2] = createSpyObserver(sharedObservable.pipe(limiter()));
+ const [observer3, spy3] = createSpyObserver(sharedObservable.pipe(limiter()));
+ const [observer4, spy4] = createSpyObserver(sharedObservable.pipe(limiter()));
+ subject.next('test1');
+
+ expect(spy1).toBeCalled();
+ expect(spy2).toBeCalled();
+ expect(spy3).not.toBeCalled();
+ expect(spy4).not.toBeCalled();
+
+ observer1.unsubscribe();
+ expect(spy3).toBeCalled();
+ expect(spy4).not.toBeCalled();
+
+ observer2.unsubscribe();
+ expect(spy4).toBeCalled();
+
+ observer3.unsubscribe();
+ observer4.unsubscribe();
+ });
+});
diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts
index a806169019a1e..dc0ed35207e46 100644
--- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts
+++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts
@@ -43,34 +43,23 @@ export const toPromiseAbortable = (
}
});
-export function createLimiter(ratelimitIntervalMs: number, ratelimitRequestPerInterval: number) {
- function createCurrentInterval() {
- return {
- startedAt: Rx.asyncScheduler.now(),
- numRequests: 0,
- };
- }
-
- let currentInterval: { startedAt: number; numRequests: number } = createCurrentInterval();
+export function createSubscriberConcurrencyLimiter(maxConcurrency: number) {
 let observers: Array<[Rx.Subscriber<any>, any]> = [];
- let timerSubscription: Rx.Subscription | undefined;
+ let activeObservers: Array<Rx.Subscriber<any>> = [];
- function createTimeout() {
- if (timerSubscription) {
+ function processNext() {
+ if (activeObservers.length >= maxConcurrency) {
return;
}
- timerSubscription = Rx.asyncScheduler.schedule(() => {
- timerSubscription = undefined;
- currentInterval = createCurrentInterval();
- for (const [waitingObserver, value] of observers) {
- if (currentInterval.numRequests >= ratelimitRequestPerInterval) {
- createTimeout();
- continue;
- }
- currentInterval.numRequests++;
- waitingObserver.next(value);
- }
- }, ratelimitIntervalMs);
+ const observerValuePair = observers.shift();
+
+ if (!observerValuePair) {
+ return;
+ }
+
+ const [observer, value] = observerValuePair;
+ activeObservers.push(observer);
+ observer.next(value);
}
 return function limit<T>(): Rx.MonoTypeOperatorFunction<T> {
@@ -78,14 +67,8 @@ export function createLimiter(ratelimitIntervalMs: number, ratelimitRequestPerIn
 new Rx.Observable<T>((observer) => {
const subscription = observable.subscribe({
next(value) {
- if (currentInterval.numRequests < ratelimitRequestPerInterval) {
- currentInterval.numRequests++;
- observer.next(value);
- return;
- }
-
observers = [...observers, [observer, value]];
- createTimeout();
+ processNext();
},
error(err) {
observer.error(err);
@@ -96,8 +79,10 @@ export function createLimiter(ratelimitIntervalMs: number, ratelimitRequestPerIn
});
return () => {
+ activeObservers = activeObservers.filter((o) => o !== observer);
observers = observers.filter((o) => o[0] !== observer);
subscription.unsubscribe();
+ processNext();
};
});
};
diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state.ts
index 69d61171b21fc..63f22b82611c2 100644
--- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state.ts
+++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state.ts
@@ -13,9 +13,11 @@ import { AGENT_UPDATE_LAST_CHECKIN_INTERVAL_MS } from '../../../constants';
function agentCheckinStateFactory() {
const agentConnected = agentCheckinStateConnectedAgentsFactory();
- const newActions = agentCheckinStateNewActionsFactory();
+ let newActions: ReturnType<typeof agentCheckinStateNewActionsFactory>;
let interval: NodeJS.Timeout;
+
function start() {
+ newActions = agentCheckinStateNewActionsFactory();
interval = setInterval(async () => {
try {
await agentConnected.updateLastCheckinAt();
@@ -31,15 +33,20 @@ function agentCheckinStateFactory() {
}
}
return {
- subscribeToNewActions: (
+ subscribeToNewActions: async (
soClient: SavedObjectsClientContract,
agent: Agent,
options?: { signal: AbortSignal }
- ) =>
- agentConnected.wrapPromise(
+ ) => {
+ if (!newActions) {
+ throw new Error('Agent checkin state not initialized');
+ }
+
+ return agentConnected.wrapPromise(
agent.id,
newActions.subscribeToNewActions(soClient, agent, options)
- ),
+ );
+ },
start,
stop,
};
diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts
index 5ceb774a1946c..53270afe453c4 100644
--- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts
+++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts
@@ -28,7 +28,7 @@ import * as APIKeysService from '../../api_keys';
import { AGENT_SAVED_OBJECT_TYPE, AGENT_UPDATE_ACTIONS_INTERVAL_MS } from '../../../constants';
import { createAgentAction, getNewActionsSince } from '../actions';
import { appContextService } from '../../app_context';
-import { toPromiseAbortable, AbortError, createLimiter } from './rxjs_utils';
+import { toPromiseAbortable, AbortError, createSubscriberConcurrencyLimiter } from './rxjs_utils';
function getInternalUserSOClient() {
const fakeRequest = ({
@@ -134,9 +134,8 @@ export function agentCheckinStateNewActionsFactory() {
const agentConfigs$ = new Map>();
const newActions$ = createNewActionsSharedObservable();
// Rx operators
- const rateLimiter = createLimiter(
- appContextService.getConfig()?.fleet.agentConfigRollupRateLimitIntervalMs || 5000,
- appContextService.getConfig()?.fleet.agentConfigRollupRateLimitRequestPerInterval || 50
+ const concurrencyLimiter = createSubscriberConcurrencyLimiter(
+ appContextService.getConfig()?.fleet.agentConfigRolloutConcurrency ?? 10
);
async function subscribeToNewActions(
@@ -155,10 +154,11 @@ export function agentCheckinStateNewActionsFactory() {
if (!agentConfig$) {
throw new Error(`Invalid state no observable for config ${configId}`);
}
+
const stream$ = agentConfig$.pipe(
timeout(appContextService.getConfig()?.fleet.pollingRequestTimeout || 0),
filter((config) => shouldCreateAgentConfigAction(agent, config)),
- rateLimiter(),
+ concurrencyLimiter(),
mergeMap((config) => createAgentActionFromConfig(soClient, agent, config)),
merge(newActions$),
mergeMap(async (data) => {
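
To make the new operator's behaviour concrete, a small usage sketch modelled on the rxjs_utils test above: at most N subscribers of a shared observable are handed a value at a time, and a slot is released when an active subscriber unsubscribes. The config$ subject and the handler body are illustrative assumptions, not code from the patch.

    import * as Rx from 'rxjs';
    import { share } from 'rxjs/operators';
    import { createSubscriberConcurrencyLimiter } from './rxjs_utils';

    const config$ = new Rx.Subject();
    const shared$ = config$.pipe(share());

    // Mirrors agentConfigRolloutConcurrency: only 10 subscribers (agent
    // check-ins) receive a new config at once; the rest are queued.
    const limiter = createSubscriberConcurrencyLimiter(10);

    const subscription = shared$.pipe(limiter()).subscribe((value) => {
      // roll the new agent config out to one checked-in agent (illustrative)
    });

    config$.next('updated-config');
    subscription.unsubscribe(); // frees a slot for the next queued subscriber
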
From 49782f93480b8d016ebf36bda18a6d855be5aff2 Mon Sep 17 00:00:00 2001
From: Pierre Gayvallet
Date: Thu, 23 Jul 2020 14:48:13 +0200
Subject: [PATCH 05/59] delete legacy apm_oss plugin (#73016)
---
src/legacy/core_plugins/apm_oss/index.d.ts | 22 -------
src/legacy/core_plugins/apm_oss/index.js | 60 --------------------
src/legacy/core_plugins/apm_oss/package.json | 4 --
src/legacy/server/kbn_server.d.ts | 2 -
4 files changed, 88 deletions(-)
delete mode 100644 src/legacy/core_plugins/apm_oss/index.d.ts
delete mode 100644 src/legacy/core_plugins/apm_oss/index.js
delete mode 100644 src/legacy/core_plugins/apm_oss/package.json
diff --git a/src/legacy/core_plugins/apm_oss/index.d.ts b/src/legacy/core_plugins/apm_oss/index.d.ts
deleted file mode 100644
index 86fe4e0350dce..0000000000000
--- a/src/legacy/core_plugins/apm_oss/index.d.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export interface ApmOssPlugin {
- indexPatterns: string[];
-}
diff --git a/src/legacy/core_plugins/apm_oss/index.js b/src/legacy/core_plugins/apm_oss/index.js
deleted file mode 100644
index b7ab6797c0de9..0000000000000
--- a/src/legacy/core_plugins/apm_oss/index.js
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import _ from 'lodash';
-
-export default function apmOss(kibana) {
- return new kibana.Plugin({
- id: 'apm_oss',
-
- config(Joi) {
- return Joi.object({
- // enable plugin
- enabled: Joi.boolean().default(true),
-
- // Kibana Index pattern
- indexPattern: Joi.string().default('apm-*'),
-
- // ES Indices
- sourcemapIndices: Joi.string().default('apm-*'),
- errorIndices: Joi.string().default('apm-*'),
- transactionIndices: Joi.string().default('apm-*'),
- spanIndices: Joi.string().default('apm-*'),
- metricsIndices: Joi.string().default('apm-*'),
- onboardingIndices: Joi.string().default('apm-*'),
- }).default();
- },
-
- init(server) {
- server.expose(
- 'indexPatterns',
- _.uniq(
- [
- 'sourcemapIndices',
- 'errorIndices',
- 'transactionIndices',
- 'spanIndices',
- 'metricsIndices',
- 'onboardingIndices',
- ].map((type) => server.config().get(`apm_oss.${type}`))
- )
- );
- },
- });
-}
diff --git a/src/legacy/core_plugins/apm_oss/package.json b/src/legacy/core_plugins/apm_oss/package.json
deleted file mode 100644
index 4ca161f293e79..0000000000000
--- a/src/legacy/core_plugins/apm_oss/package.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "name": "apm_oss",
- "version": "kibana"
-}
diff --git a/src/legacy/server/kbn_server.d.ts b/src/legacy/server/kbn_server.d.ts
index 40996500bfbe0..9bb091383ab13 100644
--- a/src/legacy/server/kbn_server.d.ts
+++ b/src/legacy/server/kbn_server.d.ts
@@ -43,7 +43,6 @@ import {
import { LegacyConfig, ILegacyService, ILegacyInternals } from '../../core/server/legacy';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { UiPlugins } from '../../core/server/plugins';
-import { ApmOssPlugin } from '../core_plugins/apm_oss';
import { CallClusterWithRequest, ElasticsearchPlugin } from '../core_plugins/elasticsearch';
import { UsageCollectionSetup } from '../../plugins/usage_collection/server';
import { UiSettingsServiceFactoryOptions } from '../../legacy/ui/ui_settings/ui_settings_service_factory';
@@ -62,7 +61,6 @@ declare module 'hapi' {
elasticsearch: ElasticsearchPlugin;
kibana: any;
spaces: any;
- apm_oss: ApmOssPlugin;
// add new plugin types here
}
From 304445f007899681572a888cb45d35ef7e102d7c Mon Sep 17 00:00:00 2001
From: Vadim Dalecky
Date: Thu, 23 Jul 2020 06:27:06 -0700
Subject: [PATCH 06/59] =?UTF-8?q?fix:=20=F0=9F=90=9B=20don't=20show=20acti?=
=?UTF-8?q?ons=20if=20Discover=20app=20is=20disabled=20(#73017)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* fix: 🐛 don't show actions if Discover app is disabled
* style: collapse ifs
---
.../explore_data/abstract_explore_data_action.ts | 6 +++++-
.../explore_data/explore_data_chart_action.test.ts | 13 +++++++++++++
.../explore_data_context_menu_action.test.ts | 13 +++++++++++++
3 files changed, 31 insertions(+), 1 deletion(-)
diff --git a/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts b/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts
index 3aec0ce238c3c..434d38c76d428 100644
--- a/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts
+++ b/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts
@@ -49,12 +49,16 @@ export abstract class AbstractExploreDataAction {
if (!embeddable) return false;
+ const { core, plugins } = this.params.start();
+ const { capabilities } = core.application;
+
+ if (capabilities.discover && !capabilities.discover.show) return false;
+ if (!plugins.discover.urlGenerator) return false;
const isDashboardOnlyMode = !!this.params
.start()
.plugins.kibanaLegacy?.dashboardConfig.getHideWriteControls();
if (isDashboardOnlyMode) return false;
- if (!this.params.start().plugins.discover.urlGenerator) return false;
if (!shared.hasExactlyOneIndexPattern(embeddable)) return false;
if (embeddable.getInput().viewMode !== ViewMode.VIEW) return false;
return true;
diff --git a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts
index 6c3ed7a2fe778..14cd48ae1f509 100644
--- a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts
+++ b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts
@@ -196,6 +196,19 @@ describe('"Explore underlying data" panel action', () => {
expect(isCompatible).toBe(false);
});
+
+ test('returns false if Discover app is disabled', async () => {
+ const { action, context, core } = setup();
+
+ core.application.capabilities = { ...core.application.capabilities };
+ (core.application.capabilities as any).discover = {
+ show: false,
+ };
+
+ const isCompatible = await action.isCompatible(context);
+
+ expect(isCompatible).toBe(false);
+ });
});
describe('getHref()', () => {
diff --git a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts
index 1422cc871cde8..68253655af890 100644
--- a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts
+++ b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts
@@ -179,6 +179,19 @@ describe('"Explore underlying data" panel action', () => {
expect(isCompatible).toBe(false);
});
+
+ test('returns false if Discover app is disabled', async () => {
+ const { action, context, core } = setup();
+
+ core.application.capabilities = { ...core.application.capabilities };
+ (core.application.capabilities as any).discover = {
+ show: false,
+ };
+
+ const isCompatible = await action.isCompatible(context);
+
+ expect(isCompatible).toBe(false);
+ });
});
describe('getHref()', () => {
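
As a standalone illustration of the guard this patch adds (the real check lives inside AbstractExploreDataAction.isCompatible; the type alias below is a simplification for the sketch, not the Kibana Capabilities type):

    type AppCapabilities = { discover?: { show: boolean } };

    function isExploreDataActionAllowed(capabilities: AppCapabilities): boolean {
      // If the Discover app is registered but hidden from the current user,
      // the "Explore underlying data" actions are not offered.
      if (capabilities.discover && !capabilities.discover.show) return false;
      return true;
    }

    // isExploreDataActionAllowed({ discover: { show: false } }) === false
    // isExploreDataActionAllowed({}) === true (no Discover capability registered)
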
From 7280b69e9942866489da7f2f03376f7508bc74c3 Mon Sep 17 00:00:00 2001
From: Madison Caldwell
Date: Thu, 23 Jul 2020 09:54:08 -0400
Subject: [PATCH 07/59] [Security Solution][Exceptions] Preserve rule
exceptions when updating rule (#72977)
* Send exceptions_list with rule edit
* Handle exceptions list checkbox
* whoops
* Don't lose data when associating with endpoint list
* syntax
* Filter out the endpoint lists when disassociating
* Add tests
* Refactor per PR suggestions
Co-authored-by: Elastic Machine
---
.../rules/all/__mocks__/mock.ts | 7 +++
.../rules/create/helpers.test.ts | 57 +++++++++++++++++++
.../detection_engine/rules/create/helpers.ts | 22 +++++--
.../detection_engine/rules/edit/index.tsx | 3 +-
4 files changed, 84 insertions(+), 5 deletions(-)
diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/all/__mocks__/mock.ts b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/all/__mocks__/mock.ts
index 10d969ae7e6e8..14cf476e66563 100644
--- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/all/__mocks__/mock.ts
+++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/all/__mocks__/mock.ts
@@ -6,6 +6,7 @@
import { esFilters } from '../../../../../../../../../../src/plugins/data/public';
import { Rule, RuleError } from '../../../../../containers/detection_engine/rules';
+import { List } from '../../../../../../../common/detection_engine/schemas/types';
import { AboutStepRule, ActionsStepRule, DefineStepRule, ScheduleStepRule } from '../../types';
import { FieldValueQueryBar } from '../../../../../components/rules/query_bar';
@@ -240,3 +241,9 @@ export const mockRules: Rule[] = [
mockRule('abe6c564-050d-45a5-aaf0-386c37dd1f61'),
mockRule('63f06f34-c181-4b2d-af35-f2ace572a1ee'),
];
+
+export const mockExceptionsList: List = {
+ namespace_type: 'single',
+ id: '75cd4380-cc5e-11ea-9101-5b34f44aeb44',
+ type: 'detection',
+};
diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.test.ts b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.test.ts
index 745518b90df00..6458d2faa2468 100644
--- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.test.ts
+++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.test.ts
@@ -4,7 +4,10 @@
* you may not use this file except in compliance with the Elastic License.
*/
+import { List } from '../../../../../../common/detection_engine/schemas/types';
+import { ENDPOINT_LIST_ID } from '../../../../../shared_imports';
import { NewRule } from '../../../../containers/detection_engine/rules';
+
import {
DefineStepRuleJson,
ScheduleStepRuleJson,
@@ -26,12 +29,19 @@ import {
} from './helpers';
import {
mockDefineStepRule,
+ mockExceptionsList,
mockQueryBar,
mockScheduleStepRule,
mockAboutStepRule,
mockActionsStepRule,
} from '../all/__mocks__/mock';
+const ENDPOINT_LIST = {
+ id: ENDPOINT_LIST_ID,
+ namespace_type: 'agnostic',
+ type: 'endpoint',
+} as List;
+
describe('helpers', () => {
describe('getTimeTypeValue', () => {
test('returns timeObj with value 0 if no time value found', () => {
@@ -373,6 +383,53 @@ describe('helpers', () => {
expect(result).toEqual(expected);
});
+ test('returns formatted object with endpoint exceptions_list', () => {
+ const result: AboutStepRuleJson = formatAboutStepData(
+ {
+ ...mockData,
+ isAssociatedToEndpointList: true,
+ },
+ []
+ );
+ expect(result.exceptions_list).toEqual([
+ { id: ENDPOINT_LIST_ID, namespace_type: 'agnostic', type: 'endpoint' },
+ ]);
+ });
+
+ test('returns formatted object with detections exceptions_list', () => {
+ const result: AboutStepRuleJson = formatAboutStepData(mockData, [mockExceptionsList]);
+ expect(result.exceptions_list).toEqual([mockExceptionsList]);
+ });
+
+ test('returns formatted object with both exceptions_lists', () => {
+ const result: AboutStepRuleJson = formatAboutStepData(
+ {
+ ...mockData,
+ isAssociatedToEndpointList: true,
+ },
+ [mockExceptionsList]
+ );
+ expect(result.exceptions_list).toEqual([ENDPOINT_LIST, mockExceptionsList]);
+ });
+
+ test('returns formatted object with pre-existing exceptions lists', () => {
+ const exceptionsLists: List[] = [ENDPOINT_LIST, mockExceptionsList];
+ const result: AboutStepRuleJson = formatAboutStepData(
+ {
+ ...mockData,
+ isAssociatedToEndpointList: true,
+ },
+ exceptionsLists
+ );
+ expect(result.exceptions_list).toEqual(exceptionsLists);
+ });
+
+ test('returns formatted object with pre-existing endpoint exceptions list disabled', () => {
+ const exceptionsLists: List[] = [ENDPOINT_LIST, mockExceptionsList];
+ const result: AboutStepRuleJson = formatAboutStepData(mockData, exceptionsLists);
+ expect(result.exceptions_list).toEqual([mockExceptionsList]);
+ });
+
test('returns formatted object with empty falsePositive and references filtered out', () => {
const mockStepData = {
...mockData,
diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.ts b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.ts
index 38f7836f678f9..a972afbd8c0c5 100644
--- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.ts
+++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.ts
@@ -12,8 +12,9 @@ import { NOTIFICATION_THROTTLE_NO_ACTIONS } from '../../../../../../common/const
import { transformAlertToRuleAction } from '../../../../../../common/detection_engine/transform_actions';
import { RuleType } from '../../../../../../common/detection_engine/types';
import { isMlRule } from '../../../../../../common/machine_learning/helpers';
+import { List } from '../../../../../../common/detection_engine/schemas/types';
import { ENDPOINT_LIST_ID } from '../../../../../shared_imports';
-import { NewRule } from '../../../../containers/detection_engine/rules';
+import { NewRule, Rule } from '../../../../containers/detection_engine/rules';
import {
AboutStepRule,
@@ -146,7 +147,10 @@ export const formatScheduleStepData = (scheduleData: ScheduleStepRule): Schedule
};
};
-export const formatAboutStepData = (aboutStepData: AboutStepRule): AboutStepRuleJson => {
+export const formatAboutStepData = (
+ aboutStepData: AboutStepRule,
+ exceptionsList?: List[]
+): AboutStepRuleJson => {
const {
author,
falsePositives,
@@ -162,6 +166,10 @@ export const formatAboutStepData = (aboutStepData: AboutStepRule): AboutStepRule
timestampOverride,
...rest
} = aboutStepData;
+
+ const detectionExceptionLists =
+ exceptionsList != null ? exceptionsList.filter((list) => list.type !== 'endpoint') : [];
+
const resp = {
author: author.filter((item) => !isEmpty(item)),
...(isBuildingBlock ? { building_block_type: 'default' } : {}),
@@ -169,8 +177,13 @@ export const formatAboutStepData = (aboutStepData: AboutStepRule): AboutStepRule
? {
exceptions_list: [
{ id: ENDPOINT_LIST_ID, namespace_type: 'agnostic', type: 'endpoint' },
+ ...detectionExceptionLists,
] as AboutStepRuleJson['exceptions_list'],
}
+ : exceptionsList != null
+ ? {
+ exceptions_list: [...detectionExceptionLists],
+ }
: {}),
false_positives: falsePositives.filter((item) => !isEmpty(item)),
references: references.filter((item) => !isEmpty(item)),
@@ -218,11 +231,12 @@ export const formatRule = (
defineStepData: DefineStepRule,
aboutStepData: AboutStepRule,
scheduleData: ScheduleStepRule,
- actionsData: ActionsStepRule
+ actionsData: ActionsStepRule,
+ rule?: Rule | null
): NewRule =>
deepmerge.all([
formatDefineStepData(defineStepData),
- formatAboutStepData(aboutStepData),
+ formatAboutStepData(aboutStepData, rule?.exceptions_list),
formatScheduleStepData(scheduleData),
formatActionsStepData(actionsData),
]) as NewRule;
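The effect of threading the existing rule's `exceptions_list` through `formatAboutStepData` is easiest to see with concrete values. A minimal sketch, assuming the `List`, `ENDPOINT_LIST_ID`, and `AboutStepRule` imports shown above; the mock lists and the `aboutStepData` placeholder are hypothetical.

```ts
// Hypothetical inputs to illustrate the merge behaviour (not part of the patch).
declare const aboutStepData: AboutStepRule; // placeholder for the form data

const endpointList: List = { id: ENDPOINT_LIST_ID, namespace_type: 'agnostic', type: 'endpoint' };
const detectionList: List = { id: 'my-detection-list', namespace_type: 'single', type: 'detection' };
const existing: List[] = [endpointList, detectionList];

// Endpoint checkbox ON: the endpoint list is (re-)added and detection lists are preserved.
formatAboutStepData({ ...aboutStepData, isAssociatedToEndpointList: true }, existing);
// -> exceptions_list: [endpoint list, detection list]

// Endpoint checkbox OFF: endpoint entries are filtered out, detection lists still survive.
formatAboutStepData({ ...aboutStepData, isAssociatedToEndpointList: false }, existing);
// -> exceptions_list: [detection list]
```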
diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/edit/index.tsx b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/edit/index.tsx
index 0900cdb8f4789..3cc874b85ecf3 100644
--- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/edit/index.tsx
+++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/edit/index.tsx
@@ -273,7 +273,8 @@ const EditRulePageComponent: FC = () => {
: myScheduleRuleForm.data) as ScheduleStepRule,
(activeFormId === RuleStep.ruleActions
? activeForm.data
- : myActionsRuleForm.data) as ActionsStepRule
+ : myActionsRuleForm.data) as ActionsStepRule,
+ rule
),
...(ruleId ? { id: ruleId } : {}),
});
From 1ee3cdb03db5a94636b90d5ffefb4ef868a7bb81 Mon Sep 17 00:00:00 2001
From: Stratoula Kalafateli
Date: Thu, 23 Jul 2020 17:00:16 +0300
Subject: [PATCH 08/59] [Functional Tests] Unskip tsvb timeseries test (#73011)
* [Functional Tests] Unskip tsvb timeseries test
* Add retry to dropdown selection when the element is not found in headless mode
---
test/functional/apps/visualize/_tsvb_time_series.ts | 2 +-
test/functional/page_objects/visual_builder_page.ts | 6 ++++--
2 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/test/functional/apps/visualize/_tsvb_time_series.ts b/test/functional/apps/visualize/_tsvb_time_series.ts
index e0d512c1f4861..c048755fc5fbe 100644
--- a/test/functional/apps/visualize/_tsvb_time_series.ts
+++ b/test/functional/apps/visualize/_tsvb_time_series.ts
@@ -107,7 +107,7 @@ export default function ({ getPageObjects, getService }: FtrProviderContext) {
expect(actualCount).to.be(expectedLegendValue);
});
- it.skip('should show the correct count in the legend with "Human readable" duration formatter', async () => {
+ it('should show the correct count in the legend with "Human readable" duration formatter', async () => {
await visualBuilder.clickSeriesOption();
await visualBuilder.changeDataFormatter('Duration');
await visualBuilder.setDurationFormatterSettings({ to: 'Human readable' });
diff --git a/test/functional/page_objects/visual_builder_page.ts b/test/functional/page_objects/visual_builder_page.ts
index 4a4beca959540..0db8cac0f0758 100644
--- a/test/functional/page_objects/visual_builder_page.ts
+++ b/test/functional/page_objects/visual_builder_page.ts
@@ -279,8 +279,10 @@ export function VisualBuilderPageProvider({ getService, getPageObjects }: FtrPro
decimalPlaces?: string;
}) {
if (from) {
- const fromCombobox = await find.byCssSelector('[id$="from-row"] .euiComboBox');
- await comboBox.setElement(fromCombobox, from, { clickWithMouse: true });
+ await retry.try(async () => {
+ const fromCombobox = await find.byCssSelector('[id$="from-row"] .euiComboBox');
+ await comboBox.setElement(fromCombobox, from, { clickWithMouse: true });
+ });
}
if (to) {
const toCombobox = await find.byCssSelector('[id$="to-row"] .euiComboBox');
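The retry guard is only applied to the "from" combobox in this patch; if the "to" selection proves equally flaky in headless runs, the same pattern could wrap it as well. A sketch, not part of the change, using the same FTR `retry`, `find`, and `comboBox` services:

```ts
if (to) {
  await retry.try(async () => {
    // Re-query inside the retry so a stale element handle is not reused.
    const toCombobox = await find.byCssSelector('[id$="to-row"] .euiComboBox');
    await comboBox.setElement(toCombobox, to, { clickWithMouse: true });
  });
}
```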
From 2cf37a53266c8407e6993f7085e8f204f1ad4780 Mon Sep 17 00:00:00 2001
From: Matthew Kime
Date: Thu, 23 Jul 2020 09:34:56 -0500
Subject: [PATCH 09/59] Don't skip index pattern creation test (#73032)
---
.../functional/apps/management/_create_index_pattern_wizard.js | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/test/functional/apps/management/_create_index_pattern_wizard.js b/test/functional/apps/management/_create_index_pattern_wizard.js
index 160b052e70d30..9760527371408 100644
--- a/test/functional/apps/management/_create_index_pattern_wizard.js
+++ b/test/functional/apps/management/_create_index_pattern_wizard.js
@@ -25,8 +25,7 @@ export default function ({ getService, getPageObjects }) {
const es = getService('legacyEs');
const PageObjects = getPageObjects(['settings', 'common']);
- // Flaky: https://github.com/elastic/kibana/issues/71501
- describe.skip('"Create Index Pattern" wizard', function () {
+ describe('"Create Index Pattern" wizard', function () {
before(async function () {
// delete .kibana index and then wait for Kibana to re-create it
await kibanaServer.uiSettings.replace({});
From 15ccdc36cae57f34aa070818776fc453fdbdcb68 Mon Sep 17 00:00:00 2001
From: Tyler Smalley
Date: Thu, 23 Jul 2020 07:50:30 -0700
Subject: [PATCH 10/59] [test] Skips flaky uptime test
Signed-off-by: Tyler Smalley
---
x-pack/test/functional/apps/uptime/settings.ts | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/x-pack/test/functional/apps/uptime/settings.ts b/x-pack/test/functional/apps/uptime/settings.ts
index 1286a9940c02c..a258cccffbd8c 100644
--- a/x-pack/test/functional/apps/uptime/settings.ts
+++ b/x-pack/test/functional/apps/uptime/settings.ts
@@ -16,8 +16,8 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
const es = getService('es');
- // Flaky https://github.com/elastic/kibana/issues/60866
- describe('uptime settings page', () => {
+ // Flaky https://github.com/elastic/kibana/issues/72994
+ describe.skip('uptime settings page', () => {
beforeEach('navigate to clean app root', async () => {
// make 10 checks
await makeChecks(es, 'myMonitor', 1, 1, 1);
From 2d9eaf013bae31adc72578065982833ea6934f0e Mon Sep 17 00:00:00 2001
From: Daniil Suleiman <31325372+sulemanof@users.noreply.github.com>
Date: Thu, 23 Jul 2020 18:00:52 +0300
Subject: [PATCH 11/59] Fix view saved search through a visualization (#73040)
---
.../components/sidebar/sidebar_title.tsx | 9 ++++++--
.../apps/visualize/_linked_saved_searches.ts | 22 +++++++++++++++++++
2 files changed, 29 insertions(+), 2 deletions(-)
diff --git a/src/plugins/vis_default_editor/public/components/sidebar/sidebar_title.tsx b/src/plugins/vis_default_editor/public/components/sidebar/sidebar_title.tsx
index 6713c2ce2391b..11ceb5885dd31 100644
--- a/src/plugins/vis_default_editor/public/components/sidebar/sidebar_title.tsx
+++ b/src/plugins/vis_default_editor/public/components/sidebar/sidebar_title.tsx
@@ -65,7 +65,7 @@ export function LinkedSearch({ savedSearch, eventEmitter }: LinkedSearchProps) {
}, [eventEmitter]);
const onClickViewInDiscover = useCallback(() => {
application.navigateToApp('discover', {
- path: `#/${savedSearch.id}`,
+ path: `#/view/${savedSearch.id}`,
});
}, [application, savedSearch.id]);
@@ -128,7 +128,12 @@ export function LinkedSearch({ savedSearch, eventEmitter }: LinkedSearchProps) {
-
+
-
+
= ({ onClose }) => {
-
+
= ({
options={[
{
id: 'logs',
- label: i18n.translate(
- 'xpack.ingestManager.agentConfigForm.monitoringLogsFieldLabel',
- { defaultMessage: 'Collect agent logs' }
+ label: (
+ <>
+ {' '}
+
+ >
),
},
{
id: 'metrics',
- label: i18n.translate(
- 'xpack.ingestManager.agentConfigForm.monitoringMetricsFieldLabel',
- { defaultMessage: 'Collect agent metrics' }
+ label: (
+ <>
+ {' '}
+
+ >
),
},
]}
@@ -315,16 +347,14 @@ export const AgentConfigForm: React.FunctionComponent = ({
{!isEditing ? (
-
-
+
}
>
-
= ({
)}
position="right"
type="iInCircle"
+ color="subdued"
/>
>
}
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/components/config_yaml_flyout.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/components/config_yaml_flyout.tsx
index 6cf60fe1dc507..9c2d09b02665f 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/components/config_yaml_flyout.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/components/config_yaml_flyout.tsx
@@ -72,7 +72,7 @@ export const ConfigYamlFlyout = memo<{ configId: string; onClose: () => void }>(
-
+
= ({
/>
+
-
+
+
+
);
@@ -95,7 +99,7 @@ export const CreateAgentConfigFlyout: React.FunctionComponent = ({
- onClose()} flush="left">
+ onClose()} flush="left">
= ({
-
+
= ({ onCl
-
+
= ({
-
+
-
+
From 52a1b05623826650ff0aff67d0428b78caa29a46 Mon Sep 17 00:00:00 2001
From: Jen Huang
Date: Thu, 23 Jul 2020 10:47:53 -0700
Subject: [PATCH 25/59] Change copy to Agent ID (#72953)
---
.../fleet/agent_details_page/components/agent_details.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/agent_details_page/components/agent_details.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/agent_details_page/components/agent_details.tsx
index 63d93f14c63f5..de0c65d508db9 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/agent_details_page/components/agent_details.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/agent_details_page/components/agent_details.tsx
@@ -37,7 +37,7 @@ export const AgentDetailsContent: React.FunctionComponent<{
},
{
title: i18n.translate('xpack.ingestManager.agentDetails.hostIdLabel', {
- defaultMessage: 'Host ID',
+ defaultMessage: 'Agent ID',
}),
description: agent.id,
},
From 8834ca3e9a4991997dd938abf2ca0a33f7e36a63 Mon Sep 17 00:00:00 2001
From: Spencer
Date: Thu, 23 Jul 2020 10:58:04 -0700
Subject: [PATCH 26/59] [src/dev/build] typescript-ify and convert tests to
jest (#72525)
Co-authored-by: spalger
---
package.json | 3 +
packages/kbn-dev-utils/package.json | 1 +
packages/kbn-dev-utils/src/index.ts | 2 +-
.../serializers/absolute_path_serializer.ts | 2 +-
.../serializers/any_instance_serizlizer.ts | 25 ++
.../kbn-dev-utils/src/serializers/index.ts | 5 +-
.../src/serializers/recursive_serializer.ts | 29 ++
.../src/serializers/strip_ansi_serializer.ts | 29 ++
.../index.js => packages/kbn-pm/index.d.ts | 2 +-
packages/kbn-pm/tsconfig.json | 1 +
src/dev/build/args.test.ts | 264 +++++++------
src/dev/build/args.ts | 56 ++-
src/dev/build/build_distributables.js | 174 ---------
src/dev/build/build_distributables.ts | 123 ++++++
src/dev/build/{cli.js => cli.ts} | 8 +-
.../bin/world_executable | 0
.../fixtures => __fixtures__}/foo.txt.gz | Bin
.../fixtures => __fixtures__}/foo_dir.tar.gz | Bin
.../fixtures => __fixtures__}/foo_dir/.bar | 0
.../fixtures => __fixtures__}/foo_dir/bar.txt | 0
.../foo_dir/foo/foo.txt | 0
.../build/lib/__fixtures__}/log_on_sigint.js | 0
src/dev/build/lib/__tests__/build.js | 168 --------
src/dev/build/lib/__tests__/config.js | 174 ---------
src/dev/build/lib/__tests__/download.js | 237 ------------
src/dev/build/lib/__tests__/exec.js | 58 ---
src/dev/build/lib/__tests__/fs.js | 362 ------------------
src/dev/build/lib/__tests__/platform.js | 68 ----
src/dev/build/lib/__tests__/runner.js | 184 ---------
src/dev/build/lib/__tests__/version_info.js | 70 ----
src/dev/build/lib/build.js | 60 ---
src/dev/build/lib/build.test.ts | 120 ++++++
src/dev/build/lib/build.ts | 63 +++
src/dev/build/lib/config.js | 168 --------
src/dev/build/lib/config.test.ts | 201 ++++++++++
src/dev/build/lib/config.ts | 173 +++++++++
.../build/lib/{download.js => download.ts} | 21 +-
.../{__tests__/errors.js => errors.test.ts} | 22 +-
src/dev/build/lib/{errors.js => errors.ts} | 6 +-
src/dev/build/lib/exec.test.ts | 67 ++++
src/dev/build/lib/{exec.js => exec.ts} | 17 +-
src/dev/build/lib/{fs.js => fs.ts} | 157 ++++++--
src/dev/build/lib/index.js | 39 --
src/dev/build/lib/index.ts | 30 ++
.../lib/integration_tests/download.test.ts | 226 +++++++++++
.../build/lib/integration_tests/fs.test.ts | 358 +++++++++++++++++
.../{ => integration_tests}/scan_copy.test.ts | 9 +-
.../watch_stdio_for_line.test.ts | 52 +++
src/dev/build/lib/platform.js | 50 ---
src/dev/build/lib/platform.test.ts | 62 +++
src/dev/build/lib/platform.ts | 64 ++++
src/dev/build/lib/runner.test.ts | 248 ++++++++++++
src/dev/build/lib/{runner.js => runner.ts} | 51 +--
src/dev/build/lib/version_info.test.ts | 62 +++
.../lib/{version_info.js => version_info.ts} | 14 +-
.../build/lib/watch_stdio_for_line.ts} | 19 +-
...ripts_task.js => copy_bin_scripts_task.ts} | 4 +-
.../build/tasks/bin/{index.js => index.ts} | 2 +-
...ns.js => build_kibana_platform_plugins.ts} | 6 +-
...ackages_task.js => build_packages_task.ts} | 6 +-
.../tasks/{clean_tasks.js => clean_tasks.ts} | 16 +-
...opy_source_task.js => copy_source_task.ts} | 4 +-
...ask.js => create_archives_sources_task.ts} | 4 +-
...chives_task.js => create_archives_task.ts} | 44 +--
...js => create_empty_dirs_and_files_task.ts} | 4 +-
...on_task.js => create_package_json_task.ts} | 10 +-
...e_readme_task.js => create_readme_task.ts} | 4 +-
src/dev/build/tasks/{index.js => index.ts} | 4 +-
src/dev/build/tasks/install_chromium.js | 6 +-
...s_task.js => install_dependencies_task.ts} | 4 +-
...ense_file_task.js => license_file_task.ts} | 4 +-
.../__tests__/download_node_builds_task.js | 97 -----
.../__tests__/extract_node_builds_task.js | 93 -----
.../verify_existing_node_builds_task.js | 106 -----
...ilds_task.js => clean_node_builds_task.ts} | 4 +-
.../nodejs/download_node_builds_task.test.ts | 136 +++++++
...s_task.js => download_node_builds_task.ts} | 4 +-
.../nodejs/extract_node_builds_task.test.ts | 108 ++++++
...ds_task.js => extract_node_builds_task.ts} | 30 +-
src/dev/build/tasks/nodejs/index.js | 25 --
src/dev/build/tasks/nodejs/index.ts | 24 ++
...download_info.js => node_download_info.ts} | 4 +-
.../verify_existing_node_builds_task.test.ts | 225 +++++++++++
...js => verify_existing_node_builds_task.ts} | 4 +-
...otice_file_task.js => notice_file_task.ts} | 6 +-
.../{optimize_task.js => optimize_task.ts} | 4 +-
...ge_tasks.js => create_os_package_tasks.ts} | 9 +-
.../docker_generator/bundle_dockerfiles.js | 5 +-
.../{index.js => docker_generator/index.ts} | 8 +-
src/dev/build/tasks/os_packages/index.ts | 20 +
.../os_packages/{run_fpm.js => run_fpm.ts} | 16 +-
...s_task.js => patch_native_modules_task.ts} | 40 +-
...ath_length_task.js => path_length_task.ts} | 4 +-
..._babel_task.js => transpile_babel_task.ts} | 18 +-
...le_scss_task.js => transpile_scss_task.ts} | 5 +-
...tion_task.js => uuid_verification_task.ts} | 4 +-
...{verify_env_task.js => verify_env_task.ts} | 4 +-
...ha_sums_task.js => write_sha_sums_task.ts} | 4 +-
.../utils/__tests__/watch_stdio_for_line.js | 55 ---
src/legacy/utils/index.js | 1 -
src/legacy/utils/streams/index.d.ts | 2 +-
x-pack/package.json | 2 +-
yarn.lock | 8 +-
103 files changed, 3008 insertions(+), 2593 deletions(-)
create mode 100644 packages/kbn-dev-utils/src/serializers/any_instance_serizlizer.ts
create mode 100644 packages/kbn-dev-utils/src/serializers/recursive_serializer.ts
create mode 100644 packages/kbn-dev-utils/src/serializers/strip_ansi_serializer.ts
rename src/dev/build/tasks/os_packages/docker_generator/index.js => packages/kbn-pm/index.d.ts (96%)
delete mode 100644 src/dev/build/build_distributables.js
create mode 100644 src/dev/build/build_distributables.ts
rename src/dev/build/{cli.js => cli.ts} (91%)
rename src/dev/build/lib/{__tests__/fixtures => __fixtures__}/bin/world_executable (100%)
rename src/dev/build/lib/{__tests__/fixtures => __fixtures__}/foo.txt.gz (100%)
rename src/dev/build/lib/{__tests__/fixtures => __fixtures__}/foo_dir.tar.gz (100%)
rename src/dev/build/lib/{__tests__/fixtures => __fixtures__}/foo_dir/.bar (100%)
rename src/dev/build/lib/{__tests__/fixtures => __fixtures__}/foo_dir/bar.txt (100%)
rename src/dev/build/lib/{__tests__/fixtures => __fixtures__}/foo_dir/foo/foo.txt (100%)
rename src/{legacy/utils/__tests__/fixtures => dev/build/lib/__fixtures__}/log_on_sigint.js (100%)
delete mode 100644 src/dev/build/lib/__tests__/build.js
delete mode 100644 src/dev/build/lib/__tests__/config.js
delete mode 100644 src/dev/build/lib/__tests__/download.js
delete mode 100644 src/dev/build/lib/__tests__/exec.js
delete mode 100644 src/dev/build/lib/__tests__/fs.js
delete mode 100644 src/dev/build/lib/__tests__/platform.js
delete mode 100644 src/dev/build/lib/__tests__/runner.js
delete mode 100644 src/dev/build/lib/__tests__/version_info.js
delete mode 100644 src/dev/build/lib/build.js
create mode 100644 src/dev/build/lib/build.test.ts
create mode 100644 src/dev/build/lib/build.ts
delete mode 100644 src/dev/build/lib/config.js
create mode 100644 src/dev/build/lib/config.test.ts
create mode 100644 src/dev/build/lib/config.ts
rename src/dev/build/lib/{download.js => download.ts} (81%)
rename src/dev/build/lib/{__tests__/errors.js => errors.test.ts} (67%)
rename src/dev/build/lib/{errors.js => errors.ts} (86%)
create mode 100644 src/dev/build/lib/exec.test.ts
rename src/dev/build/lib/{exec.js => exec.ts} (73%)
rename src/dev/build/lib/{fs.js => fs.ts} (56%)
delete mode 100644 src/dev/build/lib/index.js
create mode 100644 src/dev/build/lib/index.ts
create mode 100644 src/dev/build/lib/integration_tests/download.test.ts
create mode 100644 src/dev/build/lib/integration_tests/fs.test.ts
rename src/dev/build/lib/{ => integration_tests}/scan_copy.test.ts (94%)
create mode 100644 src/dev/build/lib/integration_tests/watch_stdio_for_line.test.ts
delete mode 100644 src/dev/build/lib/platform.js
create mode 100644 src/dev/build/lib/platform.test.ts
create mode 100644 src/dev/build/lib/platform.ts
create mode 100644 src/dev/build/lib/runner.test.ts
rename src/dev/build/lib/{runner.js => runner.ts} (72%)
create mode 100644 src/dev/build/lib/version_info.test.ts
rename src/dev/build/lib/{version_info.js => version_info.ts} (84%)
rename src/{legacy/utils/watch_stdio_for_line.js => dev/build/lib/watch_stdio_for_line.ts} (83%)
rename src/dev/build/tasks/bin/{copy_bin_scripts_task.js => copy_bin_scripts_task.ts} (92%)
rename src/dev/build/tasks/bin/{index.js => index.ts} (92%)
rename src/dev/build/tasks/{build_kibana_platform_plugins.js => build_kibana_platform_plugins.ts} (92%)
rename src/dev/build/tasks/{build_packages_task.js => build_packages_task.ts} (97%)
rename src/dev/build/tasks/{clean_tasks.js => clean_tasks.ts} (92%)
rename src/dev/build/tasks/{copy_source_task.js => copy_source_task.ts} (95%)
rename src/dev/build/tasks/{create_archives_sources_task.js => create_archives_sources_task.ts} (95%)
rename src/dev/build/tasks/{create_archives_task.js => create_archives_task.ts} (80%)
rename src/dev/build/tasks/{create_empty_dirs_and_files_task.js => create_empty_dirs_and_files_task.ts} (92%)
rename src/dev/build/tasks/{create_package_json_task.js => create_package_json_task.ts} (92%)
rename src/dev/build/tasks/{create_readme_task.js => create_readme_task.ts} (93%)
rename src/dev/build/tasks/{index.js => index.ts} (92%)
rename src/dev/build/tasks/{install_dependencies_task.js => install_dependencies_task.ts} (94%)
rename src/dev/build/tasks/{license_file_task.js => license_file_task.ts} (94%)
delete mode 100644 src/dev/build/tasks/nodejs/__tests__/download_node_builds_task.js
delete mode 100644 src/dev/build/tasks/nodejs/__tests__/extract_node_builds_task.js
delete mode 100644 src/dev/build/tasks/nodejs/__tests__/verify_existing_node_builds_task.js
rename src/dev/build/tasks/nodejs/{clean_node_builds_task.js => clean_node_builds_task.ts} (93%)
create mode 100644 src/dev/build/tasks/nodejs/download_node_builds_task.test.ts
rename src/dev/build/tasks/nodejs/{download_node_builds_task.js => download_node_builds_task.ts} (93%)
create mode 100644 src/dev/build/tasks/nodejs/extract_node_builds_task.test.ts
rename src/dev/build/tasks/nodejs/{extract_node_builds_task.js => extract_node_builds_task.ts} (56%)
delete mode 100644 src/dev/build/tasks/nodejs/index.js
create mode 100644 src/dev/build/tasks/nodejs/index.ts
rename src/dev/build/tasks/nodejs/{node_download_info.js => node_download_info.ts} (92%)
create mode 100644 src/dev/build/tasks/nodejs/verify_existing_node_builds_task.test.ts
rename src/dev/build/tasks/nodejs/{verify_existing_node_builds_task.js => verify_existing_node_builds_task.ts} (93%)
rename src/dev/build/tasks/{notice_file_task.js => notice_file_task.ts} (95%)
rename src/dev/build/tasks/{optimize_task.js => optimize_task.ts} (95%)
rename src/dev/build/tasks/os_packages/{create_os_package_tasks.js => create_os_package_tasks.ts} (89%)
rename src/dev/build/tasks/os_packages/{index.js => docker_generator/index.ts} (84%)
create mode 100644 src/dev/build/tasks/os_packages/index.ts
rename src/dev/build/tasks/os_packages/{run_fpm.js => run_fpm.ts} (91%)
rename src/dev/build/tasks/{patch_native_modules_task.js => patch_native_modules_task.ts} (82%)
rename src/dev/build/tasks/{path_length_task.js => path_length_task.ts} (95%)
rename src/dev/build/tasks/{transpile_babel_task.js => transpile_babel_task.ts} (80%)
rename src/dev/build/tasks/{transpile_scss_task.js => transpile_scss_task.ts} (89%)
rename src/dev/build/tasks/{uuid_verification_task.js => uuid_verification_task.ts} (94%)
rename src/dev/build/tasks/{verify_env_task.js => verify_env_task.ts} (93%)
rename src/dev/build/tasks/{write_sha_sums_task.js => write_sha_sums_task.ts} (92%)
delete mode 100644 src/legacy/utils/__tests__/watch_stdio_for_line.js
diff --git a/package.json b/package.json
index 0d6bc8cc1fceb..594f0ce583987 100644
--- a/package.json
+++ b/package.json
@@ -317,6 +317,7 @@
"@types/accept": "3.1.1",
"@types/angular": "^1.6.56",
"@types/angular-mocks": "^1.7.0",
+ "@types/archiver": "^3.1.0",
"@types/babel__core": "^7.1.2",
"@types/bluebird": "^3.1.1",
"@types/boom": "^7.2.0",
@@ -398,6 +399,7 @@
"@types/testing-library__react-hooks": "^3.1.0",
"@types/type-detect": "^4.0.1",
"@types/uuid": "^3.4.4",
+ "@types/vinyl": "^2.0.4",
"@types/vinyl-fs": "^2.4.11",
"@types/zen-observable": "^0.8.0",
"@typescript-eslint/eslint-plugin": "^2.34.0",
@@ -474,6 +476,7 @@
"license-checker": "^16.0.0",
"listr": "^0.14.1",
"load-grunt-config": "^3.0.1",
+ "load-json-file": "^6.2.0",
"mocha": "^7.1.1",
"mock-fs": "^4.12.0",
"mock-http-server": "1.3.0",
diff --git a/packages/kbn-dev-utils/package.json b/packages/kbn-dev-utils/package.json
index b307bd41bb4dd..83a7a7607816c 100644
--- a/packages/kbn-dev-utils/package.json
+++ b/packages/kbn-dev-utils/package.json
@@ -20,6 +20,7 @@
"normalize-path": "^3.0.0",
"moment": "^2.24.0",
"rxjs": "^6.5.5",
+ "strip-ansi": "^6.0.0",
"tree-kill": "^1.2.2",
"tslib": "^2.0.0"
},
diff --git a/packages/kbn-dev-utils/src/index.ts b/packages/kbn-dev-utils/src/index.ts
index 582526f939e42..798746d159f60 100644
--- a/packages/kbn-dev-utils/src/index.ts
+++ b/packages/kbn-dev-utils/src/index.ts
@@ -19,7 +19,7 @@
export { withProcRunner, ProcRunner } from './proc_runner';
export * from './tooling_log';
-export { createAbsolutePathSerializer } from './serializers';
+export * from './serializers';
export {
CA_CERT_PATH,
ES_KEY_PATH,
diff --git a/packages/kbn-dev-utils/src/serializers/absolute_path_serializer.ts b/packages/kbn-dev-utils/src/serializers/absolute_path_serializer.ts
index af55622c76198..884614c8b9551 100644
--- a/packages/kbn-dev-utils/src/serializers/absolute_path_serializer.ts
+++ b/packages/kbn-dev-utils/src/serializers/absolute_path_serializer.ts
@@ -21,7 +21,7 @@ import { REPO_ROOT } from '../repo_root';
export function createAbsolutePathSerializer(rootPath: string = REPO_ROOT) {
return {
- serialize: (value: string) => value.replace(rootPath, '').replace(/\\/g, '/'),
test: (value: any) => typeof value === 'string' && value.startsWith(rootPath),
+ serialize: (value: string) => value.replace(rootPath, '').replace(/\\/g, '/'),
};
}
diff --git a/packages/kbn-dev-utils/src/serializers/any_instance_serizlizer.ts b/packages/kbn-dev-utils/src/serializers/any_instance_serizlizer.ts
new file mode 100644
index 0000000000000..c5cc095e9ee82
--- /dev/null
+++ b/packages/kbn-dev-utils/src/serializers/any_instance_serizlizer.ts
@@ -0,0 +1,25 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export function createAnyInstanceSerializer(Class: Function, name?: string) {
+ return {
+ test: (v: any) => v instanceof Class,
+ serialize: () => `<${name ?? Class.name}>`,
+ };
+}
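Typical use of the new serializer in a jest test, as exercised by the build-args tests later in this patch; the `ToolingLog` options here are only illustrative.

```ts
import { ToolingLog, createAnyInstanceSerializer } from '@kbn/dev-utils';

// Any ToolingLog instance in a snapshot now prints as "<ToolingLog>" instead of
// dumping the whole object graph.
expect.addSnapshotSerializer(createAnyInstanceSerializer(ToolingLog));

const log = new ToolingLog({ level: 'info', writeTo: process.stdout });
expect({ log }).toMatchInlineSnapshot(`
  Object {
    "log": <ToolingLog>,
  }
`);
```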
diff --git a/packages/kbn-dev-utils/src/serializers/index.ts b/packages/kbn-dev-utils/src/serializers/index.ts
index 3b49e243058df..e645a3be3fe5d 100644
--- a/packages/kbn-dev-utils/src/serializers/index.ts
+++ b/packages/kbn-dev-utils/src/serializers/index.ts
@@ -17,4 +17,7 @@
* under the License.
*/
-export { createAbsolutePathSerializer } from './absolute_path_serializer';
+export * from './absolute_path_serializer';
+export * from './strip_ansi_serializer';
+export * from './recursive_serializer';
+export * from './any_instance_serizlizer';
diff --git a/packages/kbn-dev-utils/src/serializers/recursive_serializer.ts b/packages/kbn-dev-utils/src/serializers/recursive_serializer.ts
new file mode 100644
index 0000000000000..537ae4972c842
--- /dev/null
+++ b/packages/kbn-dev-utils/src/serializers/recursive_serializer.ts
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export function createRecursiveSerializer(test: (v: any) => boolean, print: (v: any) => string) {
+ return {
+ test: (v: any) => test(v),
+ serialize: (v: any, ...rest: any[]) => {
+ const replacement = print(v);
+ const printer = rest.pop()!;
+ return printer(replacement, ...rest);
+ },
+ };
+}
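`createRecursiveSerializer` hands the replacement string back to jest's remaining printers, so quoting and escaping still apply. One illustrative use, with a hypothetical repo path:

```ts
import { createRecursiveSerializer } from '@kbn/dev-utils';

const FAKE_REPO_ROOT = '/home/me/kibana'; // hypothetical value for illustration

// Replace the repo root wherever it appears in snapshotted strings, then let the
// default printer handle the rewritten value.
expect.addSnapshotSerializer(
  createRecursiveSerializer(
    (v) => typeof v === 'string' && v.includes(FAKE_REPO_ROOT),
    (v: string) => v.split(FAKE_REPO_ROOT).join('<repoRoot>')
  )
);
```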
diff --git a/packages/kbn-dev-utils/src/serializers/strip_ansi_serializer.ts b/packages/kbn-dev-utils/src/serializers/strip_ansi_serializer.ts
new file mode 100644
index 0000000000000..4a2151c06f34f
--- /dev/null
+++ b/packages/kbn-dev-utils/src/serializers/strip_ansi_serializer.ts
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import stripAnsi from 'strip-ansi';
+
+import { createRecursiveSerializer } from './recursive_serializer';
+
+export function createStripAnsiSerializer() {
+ return createRecursiveSerializer(
+ (v) => typeof v === 'string' && stripAnsi(v) !== v,
+ (v) => stripAnsi(v)
+ );
+}
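The strip-ansi serializer composes the recursive serializer above; in practice it keeps coloured CLI output stable in snapshots. A small illustrative test:

```ts
import chalk from 'chalk';
import { createStripAnsiSerializer } from '@kbn/dev-utils';

expect.addSnapshotSerializer(createStripAnsiSerializer());

// Without the serializer the snapshot could contain raw ANSI escape codes.
expect(chalk.red('build failed')).toMatchInlineSnapshot(`"build failed"`);
```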
diff --git a/src/dev/build/tasks/os_packages/docker_generator/index.js b/packages/kbn-pm/index.d.ts
similarity index 96%
rename from src/dev/build/tasks/os_packages/docker_generator/index.js
rename to packages/kbn-pm/index.d.ts
index 9e0bbf51f9a56..aa55df9215c2f 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/index.js
+++ b/packages/kbn-pm/index.d.ts
@@ -17,4 +17,4 @@
* under the License.
*/
-export * from './run';
+export * from './src/index';
diff --git a/packages/kbn-pm/tsconfig.json b/packages/kbn-pm/tsconfig.json
index bfb13ee8dcf8a..c13a9243c50aa 100644
--- a/packages/kbn-pm/tsconfig.json
+++ b/packages/kbn-pm/tsconfig.json
@@ -1,6 +1,7 @@
{
"extends": "../../tsconfig.json",
"include": [
+ "./index.d.ts",
"./src/**/*.ts",
"./dist/*.d.ts",
],
diff --git a/src/dev/build/args.test.ts b/src/dev/build/args.test.ts
index 6a464eef209ec..bd118b8887c72 100644
--- a/src/dev/build/args.test.ts
+++ b/src/dev/build/args.test.ts
@@ -17,160 +17,158 @@
* under the License.
*/
-import { ToolingLog } from '@kbn/dev-utils';
+import { ToolingLog, createAnyInstanceSerializer } from '@kbn/dev-utils';
import { readCliArgs } from './args';
-const fn = (...subArgs: string[]) => {
- const result = readCliArgs(['node', 'scripts/build', ...subArgs]);
- (result as any).log = result.log instanceof ToolingLog ? '' : String(result.log);
- return result;
-};
+expect.addSnapshotSerializer(createAnyInstanceSerializer(ToolingLog));
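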
it('renders help if `--help` passed', () => {
- expect(fn('--help')).toMatchInlineSnapshot(`
-Object {
- "log": "undefined",
- "showHelp": true,
- "unknownFlags": Array [],
-}
-`);
+ expect(readCliArgs(['node', 'scripts/build', '--help'])).toMatchInlineSnapshot(`
+ Object {
+ "log": ,
+ "showHelp": true,
+ "unknownFlags": Array [],
+ }
+ `);
});
it('build default and oss dist for current platform, without packages, by default', () => {
- expect(fn()).toMatchInlineSnapshot(`
-Object {
- "buildArgs": Object {
- "buildDefaultDist": true,
- "buildOssDist": true,
- "createArchives": true,
- "createDebPackage": false,
- "createDockerPackage": false,
- "createDockerUbiPackage": false,
- "createRpmPackage": false,
- "downloadFreshNode": true,
- "isRelease": false,
- "targetAllPlatforms": false,
- "versionQualifier": "",
- },
- "log": "",
- "showHelp": false,
- "unknownFlags": Array [],
-}
-`);
+ expect(readCliArgs(['node', 'scripts/build'])).toMatchInlineSnapshot(`
+ Object {
+ "buildOptions": Object {
+ "buildDefaultDist": true,
+ "buildOssDist": true,
+ "createArchives": true,
+ "createDebPackage": false,
+ "createDockerPackage": false,
+ "createDockerUbiPackage": false,
+ "createRpmPackage": false,
+ "downloadFreshNode": true,
+ "isRelease": false,
+ "targetAllPlatforms": false,
+ "versionQualifier": "",
+ },
+ "log": ,
+ "showHelp": false,
+ "unknownFlags": Array [],
+ }
+ `);
});
it('builds packages if --all-platforms is passed', () => {
- expect(fn('--all-platforms')).toMatchInlineSnapshot(`
-Object {
- "buildArgs": Object {
- "buildDefaultDist": true,
- "buildOssDist": true,
- "createArchives": true,
- "createDebPackage": true,
- "createDockerPackage": true,
- "createDockerUbiPackage": true,
- "createRpmPackage": true,
- "downloadFreshNode": true,
- "isRelease": false,
- "targetAllPlatforms": true,
- "versionQualifier": "",
- },
- "log": "",
- "showHelp": false,
- "unknownFlags": Array [],
-}
-`);
+ expect(readCliArgs(['node', 'scripts/build', '--all-platforms'])).toMatchInlineSnapshot(`
+ Object {
+ "buildOptions": Object {
+ "buildDefaultDist": true,
+ "buildOssDist": true,
+ "createArchives": true,
+ "createDebPackage": true,
+ "createDockerPackage": true,
+ "createDockerUbiPackage": true,
+ "createRpmPackage": true,
+ "downloadFreshNode": true,
+ "isRelease": false,
+ "targetAllPlatforms": true,
+ "versionQualifier": "",
+ },
+ "log": ,
+ "showHelp": false,
+ "unknownFlags": Array [],
+ }
+ `);
});
it('limits packages if --rpm passed with --all-platforms', () => {
- expect(fn('--all-platforms', '--rpm')).toMatchInlineSnapshot(`
-Object {
- "buildArgs": Object {
- "buildDefaultDist": true,
- "buildOssDist": true,
- "createArchives": true,
- "createDebPackage": false,
- "createDockerPackage": false,
- "createDockerUbiPackage": false,
- "createRpmPackage": true,
- "downloadFreshNode": true,
- "isRelease": false,
- "targetAllPlatforms": true,
- "versionQualifier": "",
- },
- "log": "",
- "showHelp": false,
- "unknownFlags": Array [],
-}
-`);
+ expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--rpm'])).toMatchInlineSnapshot(`
+ Object {
+ "buildOptions": Object {
+ "buildDefaultDist": true,
+ "buildOssDist": true,
+ "createArchives": true,
+ "createDebPackage": false,
+ "createDockerPackage": false,
+ "createDockerUbiPackage": false,
+ "createRpmPackage": true,
+ "downloadFreshNode": true,
+ "isRelease": false,
+ "targetAllPlatforms": true,
+ "versionQualifier": "",
+ },
+ "log": ,
+ "showHelp": false,
+ "unknownFlags": Array [],
+ }
+ `);
});
it('limits packages if --deb passed with --all-platforms', () => {
- expect(fn('--all-platforms', '--deb')).toMatchInlineSnapshot(`
-Object {
- "buildArgs": Object {
- "buildDefaultDist": true,
- "buildOssDist": true,
- "createArchives": true,
- "createDebPackage": true,
- "createDockerPackage": false,
- "createDockerUbiPackage": false,
- "createRpmPackage": false,
- "downloadFreshNode": true,
- "isRelease": false,
- "targetAllPlatforms": true,
- "versionQualifier": "",
- },
- "log": "",
- "showHelp": false,
- "unknownFlags": Array [],
-}
-`);
+ expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--deb'])).toMatchInlineSnapshot(`
+ Object {
+ "buildOptions": Object {
+ "buildDefaultDist": true,
+ "buildOssDist": true,
+ "createArchives": true,
+ "createDebPackage": true,
+ "createDockerPackage": false,
+ "createDockerUbiPackage": false,
+ "createRpmPackage": false,
+ "downloadFreshNode": true,
+ "isRelease": false,
+ "targetAllPlatforms": true,
+ "versionQualifier": "",
+ },
+ "log": ,
+ "showHelp": false,
+ "unknownFlags": Array [],
+ }
+ `);
});
it('limits packages if --docker passed with --all-platforms', () => {
- expect(fn('--all-platforms', '--docker')).toMatchInlineSnapshot(`
-Object {
- "buildArgs": Object {
- "buildDefaultDist": true,
- "buildOssDist": true,
- "createArchives": true,
- "createDebPackage": false,
- "createDockerPackage": true,
- "createDockerUbiPackage": true,
- "createRpmPackage": false,
- "downloadFreshNode": true,
- "isRelease": false,
- "targetAllPlatforms": true,
- "versionQualifier": "",
- },
- "log": "",
- "showHelp": false,
- "unknownFlags": Array [],
-}
-`);
+ expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--docker']))
+ .toMatchInlineSnapshot(`
+ Object {
+ "buildOptions": Object {
+ "buildDefaultDist": true,
+ "buildOssDist": true,
+ "createArchives": true,
+ "createDebPackage": false,
+ "createDockerPackage": true,
+ "createDockerUbiPackage": true,
+ "createRpmPackage": false,
+ "downloadFreshNode": true,
+ "isRelease": false,
+ "targetAllPlatforms": true,
+ "versionQualifier": "",
+ },
+ "log": ,
+ "showHelp": false,
+ "unknownFlags": Array [],
+ }
+ `);
});
it('limits packages if --docker passed with --skip-docker-ubi and --all-platforms', () => {
- expect(fn('--all-platforms', '--docker', '--skip-docker-ubi')).toMatchInlineSnapshot(`
-Object {
- "buildArgs": Object {
- "buildDefaultDist": true,
- "buildOssDist": true,
- "createArchives": true,
- "createDebPackage": false,
- "createDockerPackage": true,
- "createDockerUbiPackage": false,
- "createRpmPackage": false,
- "downloadFreshNode": true,
- "isRelease": false,
- "targetAllPlatforms": true,
- "versionQualifier": "",
- },
- "log": "",
- "showHelp": false,
- "unknownFlags": Array [],
-}
-`);
+ expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--docker', '--skip-docker-ubi']))
+ .toMatchInlineSnapshot(`
+ Object {
+ "buildOptions": Object {
+ "buildDefaultDist": true,
+ "buildOssDist": true,
+ "createArchives": true,
+ "createDebPackage": false,
+ "createDockerPackage": true,
+ "createDockerUbiPackage": false,
+ "createRpmPackage": false,
+ "downloadFreshNode": true,
+ "isRelease": false,
+ "targetAllPlatforms": true,
+ "versionQualifier": "",
+ },
+ "log": ,
+ "showHelp": false,
+ "unknownFlags": Array [],
+ }
+ `);
});
diff --git a/src/dev/build/args.ts b/src/dev/build/args.ts
index 1ff42d524c596..8e77024a7e8ae 100644
--- a/src/dev/build/args.ts
+++ b/src/dev/build/args.ts
@@ -20,16 +20,9 @@
import getopts from 'getopts';
import { ToolingLog, pickLevelFromFlags } from '@kbn/dev-utils';
-interface ParsedArgs {
- showHelp: boolean;
- unknownFlags: string[];
- log?: ToolingLog;
- buildArgs?: {
- [key: string]: any;
- };
-}
+import { BuildOptions } from './build_distributables';
-export function readCliArgs(argv: string[]): ParsedArgs {
+export function readCliArgs(argv: string[]) {
const unknownFlags: string[] = [];
const flags = getopts(argv, {
boolean: [
@@ -70,8 +63,16 @@ export function readCliArgs(argv: string[]): ParsedArgs {
},
});
+ const log = new ToolingLog({
+ level: pickLevelFromFlags(flags, {
+ default: flags.debug === false ? 'info' : 'debug',
+ }),
+ writeTo: process.stdout,
+ });
+
if (unknownFlags.length || flags.help) {
return {
+ log,
showHelp: true,
unknownFlags,
};
@@ -83,13 +84,6 @@ export function readCliArgs(argv: string[]): ParsedArgs {
flags['all-platforms'] = true;
}
- const log = new ToolingLog({
- level: pickLevelFromFlags(flags, {
- default: flags.debug === false ? 'info' : 'debug',
- }),
- writeTo: process.stdout,
- });
-
function isOsPackageDesired(name: string) {
if (flags['skip-os-packages'] || !flags['all-platforms']) {
return false;
@@ -103,22 +97,24 @@ export function readCliArgs(argv: string[]): ParsedArgs {
return Boolean(flags[name]);
}
+ const buildOptions: BuildOptions = {
+ isRelease: Boolean(flags.release),
+ versionQualifier: flags['version-qualifier'],
+ buildOssDist: flags.oss !== false,
+ buildDefaultDist: !flags.oss,
+ downloadFreshNode: !Boolean(flags['skip-node-download']),
+ createArchives: !Boolean(flags['skip-archives']),
+ createRpmPackage: isOsPackageDesired('rpm'),
+ createDebPackage: isOsPackageDesired('deb'),
+ createDockerPackage: isOsPackageDesired('docker'),
+ createDockerUbiPackage: isOsPackageDesired('docker') && !Boolean(flags['skip-docker-ubi']),
+ targetAllPlatforms: Boolean(flags['all-platforms']),
+ };
+
return {
+ log,
showHelp: false,
unknownFlags: [],
- log,
- buildArgs: {
- isRelease: Boolean(flags.release),
- versionQualifier: flags['version-qualifier'],
- buildOssDist: flags.oss !== false,
- buildDefaultDist: !flags.oss,
- downloadFreshNode: !Boolean(flags['skip-node-download']),
- createArchives: !Boolean(flags['skip-archives']),
- createRpmPackage: isOsPackageDesired('rpm'),
- createDebPackage: isOsPackageDesired('deb'),
- createDockerPackage: isOsPackageDesired('docker'),
- createDockerUbiPackage: isOsPackageDesired('docker') && !Boolean(flags['skip-docker-ubi']),
- targetAllPlatforms: Boolean(flags['all-platforms']),
- },
+ buildOptions,
};
}
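With the log constructed before the early return, callers always get a usable logger back, even for `--help` or unknown flags; `buildOptions` is only present on the success path. A consumption sketch that mirrors `cli.ts` below (imports elided):

```ts
const { log, showHelp, unknownFlags, buildOptions } = readCliArgs(process.argv);

if (unknownFlags.length) {
  log.error(`Unknown flags: ${unknownFlags.join(', ')}`);
}
if (showHelp) {
  process.exit(1);
}

// buildOptions is defined whenever showHelp is false, hence the non-null assertion.
buildDistributables(log, buildOptions!).catch((error) => {
  log.error(error);
  process.exit(1);
});
```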
diff --git a/src/dev/build/build_distributables.js b/src/dev/build/build_distributables.js
deleted file mode 100644
index 39a32fff891c2..0000000000000
--- a/src/dev/build/build_distributables.js
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { getConfig, createRunner } from './lib';
-
-import {
- BuildKibanaPlatformPluginsTask,
- BuildPackagesTask,
- CleanEmptyFoldersTask,
- CleanExtraBinScriptsTask,
- CleanExtraFilesFromModulesTask,
- CleanNodeBuildsTask,
- CleanPackagesTask,
- CleanTask,
- CleanTypescriptTask,
- CopyBinScriptsTask,
- CopySourceTask,
- CreateArchivesSourcesTask,
- CreateArchivesTask,
- CreateDebPackageTask,
- CreateDockerPackageTask,
- CreateDockerUbiPackageTask,
- CreateEmptyDirsAndFilesTask,
- CreateNoticeFileTask,
- CreatePackageJsonTask,
- CreateReadmeTask,
- CreateRpmPackageTask,
- DownloadNodeBuildsTask,
- ExtractNodeBuildsTask,
- InstallChromiumTask,
- InstallDependenciesTask,
- OptimizeBuildTask,
- PatchNativeModulesTask,
- PathLengthTask,
- RemovePackageJsonDepsTask,
- RemoveWorkspacesTask,
- TranspileBabelTask,
- TranspileScssTask,
- UpdateLicenseFileTask,
- UuidVerificationTask,
- VerifyEnvTask,
- VerifyExistingNodeBuildsTask,
- WriteShaSumsTask,
-} from './tasks';
-
-export async function buildDistributables(options) {
- const {
- log,
- isRelease,
- buildOssDist,
- buildDefaultDist,
- downloadFreshNode,
- createArchives,
- createRpmPackage,
- createDebPackage,
- createDockerPackage,
- createDockerUbiPackage,
- versionQualifier,
- targetAllPlatforms,
- } = options;
-
- log.verbose('building distributables with options:', {
- isRelease,
- buildOssDist,
- buildDefaultDist,
- downloadFreshNode,
- createArchives,
- createRpmPackage,
- createDebPackage,
- versionQualifier,
- });
-
- const config = await getConfig({
- isRelease,
- versionQualifier,
- targetAllPlatforms,
- });
-
- const run = createRunner({
- config,
- log,
- buildOssDist,
- buildDefaultDist,
- });
-
- /**
- * verify, reset, and initialize the build environment
- */
- await run(VerifyEnvTask);
- await run(CleanTask);
- await run(downloadFreshNode ? DownloadNodeBuildsTask : VerifyExistingNodeBuildsTask);
- await run(ExtractNodeBuildsTask);
-
- /**
- * run platform-generic build tasks
- */
- await run(CopySourceTask);
- await run(CopyBinScriptsTask);
- await run(CreateEmptyDirsAndFilesTask);
- await run(CreateReadmeTask);
- await run(TranspileBabelTask);
- await run(BuildPackagesTask);
- await run(CreatePackageJsonTask);
- await run(InstallDependenciesTask);
- await run(RemoveWorkspacesTask);
- await run(CleanPackagesTask);
- await run(CreateNoticeFileTask);
- await run(UpdateLicenseFileTask);
- await run(RemovePackageJsonDepsTask);
- await run(TranspileScssTask);
- await run(BuildKibanaPlatformPluginsTask);
- await run(OptimizeBuildTask);
- await run(CleanTypescriptTask);
- await run(CleanExtraFilesFromModulesTask);
- await run(CleanEmptyFoldersTask);
-
- /**
- * copy generic build outputs into platform-specific build
- * directories and perform platform/architecture-specific steps
- */
- await run(CreateArchivesSourcesTask);
- await run(PatchNativeModulesTask);
- await run(InstallChromiumTask);
- await run(CleanExtraBinScriptsTask);
- await run(CleanNodeBuildsTask);
-
- await run(PathLengthTask);
- await run(UuidVerificationTask);
-
- /**
- * package platform-specific builds into archives
- * or os-specific packages in the target directory
- */
- if (createArchives) {
- // control w/ --skip-archives
- await run(CreateArchivesTask);
- }
- if (createDebPackage) {
- // control w/ --deb or --skip-os-packages
- await run(CreateDebPackageTask);
- }
- if (createRpmPackage) {
- // control w/ --rpm or --skip-os-packages
- await run(CreateRpmPackageTask);
- }
- if (createDockerPackage) {
- // control w/ --docker or --skip-docker-ubi or --skip-os-packages
- await run(CreateDockerPackageTask);
- if (createDockerUbiPackage) {
- await run(CreateDockerUbiPackageTask);
- }
- }
-
- /**
- * finalize artifacts by writing sha1sums of each into the target directory
- */
- await run(WriteShaSumsTask);
-}
diff --git a/src/dev/build/build_distributables.ts b/src/dev/build/build_distributables.ts
new file mode 100644
index 0000000000000..bfcc98d6cd9a8
--- /dev/null
+++ b/src/dev/build/build_distributables.ts
@@ -0,0 +1,123 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { ToolingLog } from '@kbn/dev-utils';
+
+import { Config, createRunner } from './lib';
+import * as Tasks from './tasks';
+
+export interface BuildOptions {
+ isRelease: boolean;
+ buildOssDist: boolean;
+ buildDefaultDist: boolean;
+ downloadFreshNode: boolean;
+ createArchives: boolean;
+ createRpmPackage: boolean;
+ createDebPackage: boolean;
+ createDockerPackage: boolean;
+ createDockerUbiPackage: boolean;
+ versionQualifier: string | undefined;
+ targetAllPlatforms: boolean;
+}
+
+export async function buildDistributables(log: ToolingLog, options: BuildOptions) {
+ log.verbose('building distributables with options:', options);
+
+ const config = await Config.create(options);
+
+ const run = createRunner({
+ config,
+ log,
+ buildDefaultDist: options.buildDefaultDist,
+ buildOssDist: options.buildOssDist,
+ });
+
+ /**
+ * verify, reset, and initialize the build environment
+ */
+ await run(Tasks.VerifyEnv);
+ await run(Tasks.Clean);
+ await run(options.downloadFreshNode ? Tasks.DownloadNodeBuilds : Tasks.VerifyExistingNodeBuilds);
+ await run(Tasks.ExtractNodeBuilds);
+
+ /**
+ * run platform-generic build tasks
+ */
+ await run(Tasks.CopySource);
+ await run(Tasks.CopyBinScripts);
+ await run(Tasks.CreateEmptyDirsAndFiles);
+ await run(Tasks.CreateReadme);
+ await run(Tasks.TranspileBabel);
+ await run(Tasks.BuildPackages);
+ await run(Tasks.CreatePackageJson);
+ await run(Tasks.InstallDependencies);
+ await run(Tasks.RemoveWorkspaces);
+ await run(Tasks.CleanPackages);
+ await run(Tasks.CreateNoticeFile);
+ await run(Tasks.UpdateLicenseFile);
+ await run(Tasks.RemovePackageJsonDeps);
+ await run(Tasks.TranspileScss);
+ await run(Tasks.BuildKibanaPlatformPlugins);
+ await run(Tasks.OptimizeBuild);
+ await run(Tasks.CleanTypescript);
+ await run(Tasks.CleanExtraFilesFromModules);
+ await run(Tasks.CleanEmptyFolders);
+
+ /**
+ * copy generic build outputs into platform-specific build
+ * directories and perform platform/architecture-specific steps
+ */
+ await run(Tasks.CreateArchivesSources);
+ await run(Tasks.PatchNativeModules);
+ await run(Tasks.InstallChromium);
+ await run(Tasks.CleanExtraBinScripts);
+ await run(Tasks.CleanNodeBuilds);
+
+ await run(Tasks.PathLength);
+ await run(Tasks.UuidVerification);
+
+ /**
+ * package platform-specific builds into archives
+ * or os-specific packages in the target directory
+ */
+ if (options.createArchives) {
+ // control w/ --skip-archives
+ await run(Tasks.CreateArchives);
+ }
+ if (options.createDebPackage) {
+ // control w/ --deb or --skip-os-packages
+ await run(Tasks.CreateDebPackage);
+ }
+ if (options.createRpmPackage) {
+ // control w/ --rpm or --skip-os-packages
+ await run(Tasks.CreateRpmPackage);
+ }
+ if (options.createDockerPackage) {
+ // control w/ --docker or --skip-docker-ubi or --skip-os-packages
+ await run(Tasks.CreateDockerPackage);
+ if (options.createDockerUbiPackage) {
+ await run(Tasks.CreateDockerUbiPackage);
+ }
+ }
+
+ /**
+ * finalize artifacts by writing sha1sums of each into the target directory
+ */
+ await run(Tasks.WriteShaSums);
+}
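The namespace import (`Tasks.*`) keeps the long task list readable; each entry is a plain object consumed by the runner from `./lib`. The exact `Task` interface is not part of this hunk, so the shape below is an assumption sketched purely for illustration.

```ts
// Hypothetical custom task; the real Task/GlobalTask types live in src/dev/build/lib
// and may differ from this assumed shape.
const LogHello = {
  global: true, // assumed flag for tasks that run once rather than once per build
  description: 'Log a message before platform-specific work starts',
  async run(_config: Config, log: ToolingLog) {
    log.info('hello from a hypothetical build task');
  },
};

// elsewhere, inside buildDistributables(): await run(LogHello);
```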
diff --git a/src/dev/build/cli.js b/src/dev/build/cli.ts
similarity index 91%
rename from src/dev/build/cli.js
rename to src/dev/build/cli.ts
index 9d23f92a3bafd..5811fc42d2009 100644
--- a/src/dev/build/cli.js
+++ b/src/dev/build/cli.ts
@@ -29,15 +29,15 @@ import { readCliArgs } from './args';
// ensure the cwd() is always the repo root
process.chdir(resolve(__dirname, '../../../'));
-const { showHelp, unknownFlags, log, buildArgs } = readCliArgs(process.argv);
+const { showHelp, unknownFlags, log, buildOptions } = readCliArgs(process.argv);
if (unknownFlags.length) {
const pluralized = unknownFlags.length > 1 ? 'flags' : 'flag';
- console.log(chalk`\n{red Unknown ${pluralized}: ${unknownFlags.join(', ')}}\n`);
+ log.error(`Unknown ${pluralized}: ${unknownFlags.join(', ')}}`);
}
if (showHelp) {
- console.log(
+ log.write(
dedent(chalk`
{dim usage:} node scripts/build
@@ -63,7 +63,7 @@ if (showHelp) {
process.exit(1);
}
-buildDistributables({ log, ...buildArgs }).catch((error) => {
+buildDistributables(log, buildOptions!).catch((error) => {
if (!isErrorLogged(error)) {
log.error('Uncaught error');
log.error(error);
diff --git a/src/dev/build/lib/__tests__/fixtures/bin/world_executable b/src/dev/build/lib/__fixtures__/bin/world_executable
similarity index 100%
rename from src/dev/build/lib/__tests__/fixtures/bin/world_executable
rename to src/dev/build/lib/__fixtures__/bin/world_executable
diff --git a/src/dev/build/lib/__tests__/fixtures/foo.txt.gz b/src/dev/build/lib/__fixtures__/foo.txt.gz
similarity index 100%
rename from src/dev/build/lib/__tests__/fixtures/foo.txt.gz
rename to src/dev/build/lib/__fixtures__/foo.txt.gz
diff --git a/src/dev/build/lib/__tests__/fixtures/foo_dir.tar.gz b/src/dev/build/lib/__fixtures__/foo_dir.tar.gz
similarity index 100%
rename from src/dev/build/lib/__tests__/fixtures/foo_dir.tar.gz
rename to src/dev/build/lib/__fixtures__/foo_dir.tar.gz
diff --git a/src/dev/build/lib/__tests__/fixtures/foo_dir/.bar b/src/dev/build/lib/__fixtures__/foo_dir/.bar
similarity index 100%
rename from src/dev/build/lib/__tests__/fixtures/foo_dir/.bar
rename to src/dev/build/lib/__fixtures__/foo_dir/.bar
diff --git a/src/dev/build/lib/__tests__/fixtures/foo_dir/bar.txt b/src/dev/build/lib/__fixtures__/foo_dir/bar.txt
similarity index 100%
rename from src/dev/build/lib/__tests__/fixtures/foo_dir/bar.txt
rename to src/dev/build/lib/__fixtures__/foo_dir/bar.txt
diff --git a/src/dev/build/lib/__tests__/fixtures/foo_dir/foo/foo.txt b/src/dev/build/lib/__fixtures__/foo_dir/foo/foo.txt
similarity index 100%
rename from src/dev/build/lib/__tests__/fixtures/foo_dir/foo/foo.txt
rename to src/dev/build/lib/__fixtures__/foo_dir/foo/foo.txt
diff --git a/src/legacy/utils/__tests__/fixtures/log_on_sigint.js b/src/dev/build/lib/__fixtures__/log_on_sigint.js
similarity index 100%
rename from src/legacy/utils/__tests__/fixtures/log_on_sigint.js
rename to src/dev/build/lib/__fixtures__/log_on_sigint.js
diff --git a/src/dev/build/lib/__tests__/build.js b/src/dev/build/lib/__tests__/build.js
deleted file mode 100644
index af9479e73f3dc..0000000000000
--- a/src/dev/build/lib/__tests__/build.js
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import expect from '@kbn/expect';
-import sinon from 'sinon';
-
-import { createBuild } from '../build';
-
-describe('dev/build/lib/build', () => {
- describe('Build instance', () => {
- describe('#isOss()', () => {
- it('returns true if passed oss: true', () => {
- const build = createBuild({
- oss: true,
- });
-
- expect(build.isOss()).to.be(true);
- });
-
- it('returns false if passed oss: false', () => {
- const build = createBuild({
- oss: false,
- });
-
- expect(build.isOss()).to.be(false);
- });
- });
-
- describe('#getName()', () => {
- it('returns kibana when oss: false', () => {
- const build = createBuild({
- oss: false,
- });
-
- expect(build.getName()).to.be('kibana');
- });
- it('returns kibana-oss when oss: true', () => {
- const build = createBuild({
- oss: true,
- });
-
- expect(build.getName()).to.be('kibana-oss');
- });
- });
-
- describe('#getLogTag()', () => {
- it('returns string with build name in it', () => {
- const build = createBuild({});
-
- expect(build.getLogTag()).to.contain(build.getName());
- });
- });
-
- describe('#resolvePath()', () => {
- it('uses passed config to resolve a path relative to the build', () => {
- const resolveFromRepo = sinon.stub();
- const build = createBuild({
- config: { resolveFromRepo },
- });
-
- build.resolvePath('bar');
- sinon.assert.calledWithExactly(resolveFromRepo, 'build', 'kibana', 'bar');
- });
-
- it('passes all arguments to config.resolveFromRepo()', () => {
- const resolveFromRepo = sinon.stub();
- const build = createBuild({
- config: { resolveFromRepo },
- });
-
- build.resolvePath('bar', 'baz', 'box');
- sinon.assert.calledWithExactly(resolveFromRepo, 'build', 'kibana', 'bar', 'baz', 'box');
- });
- });
-
- describe('#resolvePathForPlatform()', () => {
- it('uses config.resolveFromRepo(), config.getBuildVersion(), and platform.getBuildName() to create path', () => {
- const resolveFromRepo = sinon.stub();
- const getBuildVersion = sinon.stub().returns('buildVersion');
- const build = createBuild({
- oss: true,
- config: { resolveFromRepo, getBuildVersion },
- });
-
- const getBuildName = sinon.stub().returns('platformName');
- const platform = {
- getBuildName,
- };
-
- build.resolvePathForPlatform(platform, 'foo', 'bar');
- sinon.assert.calledWithExactly(getBuildName);
- sinon.assert.calledWithExactly(getBuildVersion);
- sinon.assert.calledWithExactly(
- resolveFromRepo,
- 'build',
- 'oss',
- `kibana-buildVersion-platformName`,
- 'foo',
- 'bar'
- );
- });
- });
-
- describe('#getPlatformArchivePath()', () => {
- const sandbox = sinon.createSandbox();
-
- const config = {
- resolveFromRepo: sandbox.stub(),
- getBuildVersion: sandbox.stub().returns('buildVersion'),
- };
-
- const build = createBuild({
- oss: false,
- config,
- });
-
- const platform = {
- getBuildName: sandbox.stub().returns('platformName'),
- isWindows: sandbox.stub().returns(false),
- };
-
- beforeEach(() => {
- sandbox.resetHistory();
- });
-
- it('uses config.resolveFromRepo(), config.getBuildVersion, and platform.getBuildName() to create path', () => {
- build.getPlatformArchivePath(platform);
- sinon.assert.calledWithExactly(platform.getBuildName);
- sinon.assert.calledWithExactly(platform.isWindows);
- sinon.assert.calledWithExactly(config.getBuildVersion);
- sinon.assert.calledWithExactly(
- config.resolveFromRepo,
- 'target',
- `kibana-buildVersion-platformName.tar.gz`
- );
- });
-
- it('creates .zip path if platform is windows', () => {
- platform.isWindows.returns(true);
- build.getPlatformArchivePath(platform);
- sinon.assert.calledWithExactly(platform.getBuildName);
- sinon.assert.calledWithExactly(platform.isWindows);
- sinon.assert.calledWithExactly(config.getBuildVersion);
- sinon.assert.calledWithExactly(
- config.resolveFromRepo,
- 'target',
- `kibana-buildVersion-platformName.zip`
- );
- });
- });
- });
-});
diff --git a/src/dev/build/lib/__tests__/config.js b/src/dev/build/lib/__tests__/config.js
deleted file mode 100644
index 9544fc84dc6ff..0000000000000
--- a/src/dev/build/lib/__tests__/config.js
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { resolve } from 'path';
-
-import expect from '@kbn/expect';
-
-import pkg from '../../../../../package.json';
-import { getConfig } from '../config';
-import { getVersionInfo } from '../version_info';
-
-describe('dev/build/lib/config', () => {
- const setup = async function ({ targetAllPlatforms = true } = {}) {
- const isRelease = Boolean(Math.round(Math.random()));
- const config = await getConfig({
- isRelease,
- targetAllPlatforms,
- });
- const buildInfo = await getVersionInfo({
- isRelease,
- pkg,
- });
- return { config, buildInfo };
- };
-
- describe('#getKibanaPkg()', () => {
- it('returns the parsed package.json from the Kibana repo', async () => {
- const { config } = await setup();
- expect(config.getKibanaPkg()).to.eql(pkg);
- });
- });
-
- describe('#getNodeVersion()', () => {
- it('returns the node version from the kibana package.json', async () => {
- const { config } = await setup();
- expect(config.getNodeVersion()).to.eql(pkg.engines.node);
- });
- });
-
- describe('#getRepoRelativePath()', () => {
- it('converts an absolute path to relative path, from the root of the repo', async () => {
- const { config } = await setup();
- expect(config.getRepoRelativePath(__dirname)).to.match(/^src[\/\\]dev[\/\\]build/);
- });
- });
-
- describe('#resolveFromRepo()', () => {
- it('resolves a relative path', async () => {
- const { config } = await setup();
- expect(config.resolveFromRepo('src/dev/build/lib/__tests__')).to.be(__dirname);
- });
-
- it('resolves a series of relative paths', async () => {
- const { config } = await setup();
- expect(config.resolveFromRepo('src', 'dev', 'build', 'lib', '__tests__')).to.be(__dirname);
- });
- });
-
- describe('#getPlatform()', () => {
- it('throws error when platform does not exist', async () => {
- const { config } = await setup();
- const fn = () => config.getPlatform('foo', 'x64');
-
- expect(fn).to.throwException(/Unable to find platform/);
- });
-
- it('throws error when architecture does not exist', async () => {
- const { config } = await setup();
- const fn = () => config.getPlatform('linux', 'foo');
-
- expect(fn).to.throwException(/Unable to find platform/);
- });
- });
-
- describe('#getTargetPlatforms()', () => {
- it('returns an array of all platform objects', async () => {
- const { config } = await setup();
- expect(
- config
- .getTargetPlatforms()
- .map((p) => p.getNodeArch())
- .sort()
- ).to.eql(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']);
- });
-
- it('returns just this platform when targetAllPlatforms = false', async () => {
- const { config } = await setup({ targetAllPlatforms: false });
- const platforms = config.getTargetPlatforms();
-
- expect(platforms).to.be.an('array');
- expect(platforms).to.have.length(1);
- expect(platforms[0]).to.be(config.getPlatformForThisOs());
- });
- });
-
- describe('#getNodePlatforms()', () => {
- it('returns all platforms', async () => {
- const { config } = await setup();
- expect(
- config
- .getTargetPlatforms()
- .map((p) => p.getNodeArch())
- .sort()
- ).to.eql(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']);
- });
-
- it('returns this platform and linux, when targetAllPlatforms = false', async () => {
- const { config } = await setup({ targetAllPlatforms: false });
- const platforms = config.getNodePlatforms();
- expect(platforms).to.be.an('array');
- if (process.platform !== 'linux') {
- expect(platforms).to.have.length(2);
- expect(platforms[0]).to.be(config.getPlatformForThisOs());
- expect(platforms[1]).to.be(config.getPlatform('linux', 'x64'));
- } else {
- expect(platforms).to.have.length(1);
- expect(platforms[0]).to.be(config.getPlatform('linux', 'x64'));
- }
- });
- });
-
- describe('#getPlatformForThisOs()', () => {
- it('returns the platform that matches the arch of this machine', async () => {
- const { config } = await setup();
- const currentPlatform = config.getPlatformForThisOs();
- expect(currentPlatform.getName()).to.be(process.platform);
- expect(currentPlatform.getArchitecture()).to.be(process.arch);
- });
- });
-
- describe('#getBuildVersion()', () => {
- it('returns the version from the build info', async () => {
- const { config, buildInfo } = await setup();
- expect(config.getBuildVersion()).to.be(buildInfo.buildVersion);
- });
- });
-
- describe('#getBuildNumber()', () => {
- it('returns the number from the build info', async () => {
- const { config, buildInfo } = await setup();
- expect(config.getBuildNumber()).to.be(buildInfo.buildNumber);
- });
- });
-
- describe('#getBuildSha()', () => {
- it('returns the sha from the build info', async () => {
- const { config, buildInfo } = await setup();
- expect(config.getBuildSha()).to.be(buildInfo.buildSha);
- });
- });
-
- describe('#resolveFromTarget()', () => {
- it('resolves a relative path, from the target directory', async () => {
- const { config } = await setup();
- expect(config.resolveFromTarget()).to.be(resolve(__dirname, '../../../../../target'));
- });
- });
-});
diff --git a/src/dev/build/lib/__tests__/download.js b/src/dev/build/lib/__tests__/download.js
deleted file mode 100644
index 49cb9caaaf4ec..0000000000000
--- a/src/dev/build/lib/__tests__/download.js
+++ /dev/null
@@ -1,237 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { createServer } from 'http';
-import { join } from 'path';
-import { tmpdir } from 'os';
-import { mkdirp, readFileSync } from 'fs-extra';
-
-import del from 'del';
-import sinon from 'sinon';
-import { CI_PARALLEL_PROCESS_PREFIX } from '@kbn/test';
-import expect from '@kbn/expect';
-import Wreck from '@hapi/wreck';
-
-import { ToolingLog } from '@kbn/dev-utils';
-import { download } from '../download';
-
-const getTempFolder = async () => {
- const dir = join(tmpdir(), CI_PARALLEL_PROCESS_PREFIX, 'download-js-test-tmp-dir');
- console.log(dir);
- await mkdirp(dir);
- return dir;
-};
-
-describe('src/dev/build/tasks/nodejs/download', () => {
- const sandbox = sinon.createSandbox();
- let TMP_DESTINATION;
- let TMP_DIR;
-
- beforeEach(async () => {
- TMP_DIR = await getTempFolder();
- TMP_DESTINATION = join(TMP_DIR, '__tmp_download_js_test_file__');
- });
-
- afterEach(async () => {
- await del(TMP_DIR, { force: true });
- });
- afterEach(() => sandbox.reset());
-
- const onLogLine = sandbox.stub();
- const log = new ToolingLog({
- level: 'verbose',
- writeTo: {
- write: onLogLine,
- },
- });
-
- const FOO_SHA256 = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';
- const createSendHandler = (send) => (req, res) => {
- res.statusCode = 200;
- res.end(send);
- };
- const sendErrorHandler = (req, res) => {
- res.statusCode = 500;
- res.end();
- };
-
- let server;
- let serverUrl;
- let nextHandler;
- afterEach(() => (nextHandler = null));
-
- before(async () => {
- server = createServer((req, res) => {
- if (!nextHandler) {
- nextHandler = sendErrorHandler;
- }
-
- const handler = nextHandler;
- nextHandler = null;
- handler(req, res);
- });
-
- await Promise.race([
- new Promise((resolve, reject) => {
- server.once('error', reject);
- }),
- new Promise((resolve) => {
- server.listen(resolve);
- }),
- ]);
-
- serverUrl = `http://localhost:${server.address().port}/`;
- });
-
- after(async () => {
- server.close();
- server = null;
- });
-
- it('downloads from URL and checks that content matches sha256', async () => {
- nextHandler = createSendHandler('foo');
- await download({
- log,
- url: serverUrl,
- destination: TMP_DESTINATION,
- sha256: FOO_SHA256,
- });
- expect(readFileSync(TMP_DESTINATION, 'utf8')).to.be('foo');
- });
-
- it('rejects and deletes destination if sha256 does not match', async () => {
- nextHandler = createSendHandler('foo');
-
- try {
- await download({
- log,
- url: serverUrl,
- destination: TMP_DESTINATION,
- sha256: 'bar',
- });
- throw new Error('Expected download() to reject');
- } catch (error) {
- expect(error)
- .to.have.property('message')
- .contain('does not match the expected sha256 checksum');
- }
-
- try {
- readFileSync(TMP_DESTINATION);
- throw new Error('Expected download to be deleted');
- } catch (error) {
- expect(error).to.have.property('code', 'ENOENT');
- }
- });
-
- describe('reties download retries: number of times', () => {
- it('resolves if retries = 1 and first attempt fails', async () => {
- let reqCount = 0;
- nextHandler = function sequenceHandler(req, res) {
- switch (++reqCount) {
- case 1:
- nextHandler = sequenceHandler;
- return sendErrorHandler(req, res);
- default:
- return createSendHandler('foo')(req, res);
- }
- };
-
- await download({
- log,
- url: serverUrl,
- destination: TMP_DESTINATION,
- sha256: FOO_SHA256,
- retries: 2,
- });
-
- expect(readFileSync(TMP_DESTINATION, 'utf8')).to.be('foo');
- });
-
- it('resolves if first fails, second is bad shasum, but third succeeds', async () => {
- let reqCount = 0;
- nextHandler = function sequenceHandler(req, res) {
- switch (++reqCount) {
- case 1:
- nextHandler = sequenceHandler;
- return sendErrorHandler(req, res);
- case 2:
- nextHandler = sequenceHandler;
- return createSendHandler('bar')(req, res);
- default:
- return createSendHandler('foo')(req, res);
- }
- };
-
- await download({
- log,
- url: serverUrl,
- destination: TMP_DESTINATION,
- sha256: FOO_SHA256,
- retries: 2,
- });
- });
-
- it('makes 6 requests if `retries: 5` and all failed', async () => {
- let reqCount = 0;
- nextHandler = function sequenceHandler(req, res) {
- reqCount += 1;
- nextHandler = sequenceHandler;
- sendErrorHandler(req, res);
- };
-
- try {
- await download({
- log,
- url: serverUrl,
- destination: TMP_DESTINATION,
- sha256: FOO_SHA256,
- retries: 5,
- });
- throw new Error('Expected download() to reject');
- } catch (error) {
- expect(error).to.have.property('message').contain('Request failed with status code 500');
- expect(reqCount).to.be(6);
- }
- });
- });
-
- describe('sha256 option not supplied', () => {
- before(() => {
- sinon.stub(Wreck, 'request');
- });
- after(() => {
- Wreck.request.restore();
- });
-
- it('refuses to download', async () => {
- try {
- await download({
- log,
- url: 'http://google.com',
- destination: TMP_DESTINATION,
- });
-
- throw new Error('expected download() to reject');
- } catch (error) {
- expect(error).to.have.property('message').contain('refusing to download');
- }
- });
- });
-});
diff --git a/src/dev/build/lib/__tests__/exec.js b/src/dev/build/lib/__tests__/exec.js
deleted file mode 100644
index 8e122c65132ac..0000000000000
--- a/src/dev/build/lib/__tests__/exec.js
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import sinon from 'sinon';
-import stripAnsi from 'strip-ansi';
-
-import { ToolingLog } from '@kbn/dev-utils';
-import { exec } from '../exec';
-
-describe('dev/build/lib/exec', () => {
- const sandbox = sinon.createSandbox();
- afterEach(() => sandbox.reset());
-
- const onLogLine = sandbox.stub();
- const log = new ToolingLog({
- level: 'verbose',
- writeTo: {
- write: (chunk) => {
- onLogLine(stripAnsi(chunk));
- },
- },
- });
-
- it('executes a command, logs the command, and logs the output', async () => {
- await exec(log, process.execPath, ['-e', 'console.log("hi")']);
-
- // logs the command before execution
- sinon.assert.calledWithExactly(onLogLine, sinon.match(`$ ${process.execPath}`));
-
- // log output of the process
- sinon.assert.calledWithExactly(onLogLine, sinon.match(/debg\s+hi/));
- });
-
- it('logs using level: option', async () => {
- await exec(log, process.execPath, ['-e', 'console.log("hi")'], {
- level: 'info',
- });
-
- // log output of the process
- sinon.assert.calledWithExactly(onLogLine, sinon.match(/info\s+hi/));
- });
-});
diff --git a/src/dev/build/lib/__tests__/fs.js b/src/dev/build/lib/__tests__/fs.js
deleted file mode 100644
index bf7596b012f79..0000000000000
--- a/src/dev/build/lib/__tests__/fs.js
+++ /dev/null
@@ -1,362 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { resolve } from 'path';
-import { chmodSync, statSync } from 'fs';
-
-import del from 'del';
-import expect from '@kbn/expect';
-
-import { mkdirp, write, read, getChildPaths, copyAll, getFileHash, untar, gunzip } from '../fs';
-
-const TMP = resolve(__dirname, '__tmp__');
-const FIXTURES = resolve(__dirname, 'fixtures');
-const FOO_TAR_PATH = resolve(FIXTURES, 'foo_dir.tar.gz');
-const FOO_GZIP_PATH = resolve(FIXTURES, 'foo.txt.gz');
-const BAR_TXT_PATH = resolve(FIXTURES, 'foo_dir/bar.txt');
-const WORLD_EXECUTABLE = resolve(FIXTURES, 'bin/world_executable');
-
-const isWindows = /^win/.test(process.platform);
-
-// get the mode of a file as a string, like 777, or 644,
-function getCommonMode(path) {
- return statSync(path).mode.toString(8).slice(-3);
-}
-
-function assertNonAbsoluteError(error) {
- expect(error).to.be.an(Error);
- expect(error.message).to.contain('Please use absolute paths');
-}
-
-describe('dev/build/lib/fs', () => {
- // ensure WORLD_EXECUTABLE is actually executable by all
- before(async () => {
- chmodSync(WORLD_EXECUTABLE, 0o777);
- });
-
- // clean and recreate TMP directory
- beforeEach(async () => {
- await del(TMP);
- await mkdirp(TMP);
- });
-
- // cleanup TMP directory
- after(async () => {
- await del(TMP);
- });
-
- describe('mkdirp()', () => {
- it('rejects if path is not absolute', async () => {
- try {
- await mkdirp('foo/bar');
- throw new Error('Expected mkdirp() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('makes directory and necessary parent directories', async () => {
- const destination = resolve(TMP, 'a/b/c/d/e/f/g');
-
- expect(await mkdirp(destination)).to.be(undefined);
-
- expect(statSync(destination).isDirectory()).to.be(true);
- });
- });
-
- describe('write()', () => {
- it('rejects if path is not absolute', async () => {
- try {
- await write('foo/bar');
- throw new Error('Expected write() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('writes content to a file with existing parent directory', async () => {
- const destination = resolve(TMP, 'a');
-
- expect(await write(destination, 'bar')).to.be(undefined);
- expect(await read(destination)).to.be('bar');
- });
-
- it('writes content to a file with missing parents', async () => {
- const destination = resolve(TMP, 'a/b/c/d/e');
-
- expect(await write(destination, 'bar')).to.be(undefined);
- expect(await read(destination)).to.be('bar');
- });
- });
-
- describe('read()', () => {
- it('rejects if path is not absolute', async () => {
- try {
- await read('foo/bar');
- throw new Error('Expected read() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('reads file, resolves with result', async () => {
- expect(await read(BAR_TXT_PATH)).to.be('bar\n');
- });
- });
-
- describe('getChildPaths()', () => {
- it('rejects if path is not absolute', async () => {
- try {
- await getChildPaths('foo/bar');
- throw new Error('Expected getChildPaths() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('resolves with absolute paths to the children of directory', async () => {
- const path = resolve(FIXTURES, 'foo_dir');
- expect((await getChildPaths(path)).sort()).to.eql([
- resolve(FIXTURES, 'foo_dir/.bar'),
- BAR_TXT_PATH,
- resolve(FIXTURES, 'foo_dir/foo'),
- ]);
- });
-
- it('rejects with ENOENT if path does not exist', async () => {
- try {
- await getChildPaths(resolve(FIXTURES, 'notrealpath'));
- throw new Error('Expected getChildPaths() to reject');
- } catch (error) {
- expect(error).to.have.property('code', 'ENOENT');
- }
- });
- });
-
- describe('copyAll()', () => {
- it('rejects if source path is not absolute', async () => {
- try {
- await copyAll('foo/bar', __dirname);
- throw new Error('Expected copyAll() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('rejects if destination path is not absolute', async () => {
- try {
- await copyAll(__dirname, 'foo/bar');
- throw new Error('Expected copyAll() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('rejects if neither path is not absolute', async () => {
- try {
- await copyAll('foo/bar', 'foo/bar');
- throw new Error('Expected copyAll() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('copies files and directories from source to dest, creating dest if necessary, respecting mode', async () => {
- const destination = resolve(TMP, 'a/b/c');
- await copyAll(FIXTURES, destination);
-
- expect((await getChildPaths(resolve(destination, 'foo_dir'))).sort()).to.eql([
- resolve(destination, 'foo_dir/bar.txt'),
- resolve(destination, 'foo_dir/foo'),
- ]);
-
- expect(getCommonMode(resolve(destination, 'bin/world_executable'))).to.be(
- isWindows ? '666' : '777'
- );
- expect(getCommonMode(resolve(destination, 'foo_dir/bar.txt'))).to.be(
- isWindows ? '666' : '644'
- );
- });
-
- it('applies select globs if specified, ignores dot files', async () => {
- const destination = resolve(TMP, 'a/b/c/d');
- await copyAll(FIXTURES, destination, {
- select: ['**/*bar*'],
- });
-
- try {
- statSync(resolve(destination, 'bin/world_executable'));
- throw new Error('expected bin/world_executable to not by copied');
- } catch (error) {
- expect(error).to.have.property('code', 'ENOENT');
- }
-
- try {
- statSync(resolve(destination, 'foo_dir/.bar'));
- throw new Error('expected foo_dir/.bar to not by copied');
- } catch (error) {
- expect(error).to.have.property('code', 'ENOENT');
- }
-
- expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n');
- });
-
- it('supports select globs and dot option together', async () => {
- const destination = resolve(TMP, 'a/b/c/d');
- await copyAll(FIXTURES, destination, {
- select: ['**/*bar*'],
- dot: true,
- });
-
- try {
- statSync(resolve(destination, 'bin/world_executable'));
- throw new Error('expected bin/world_executable to not by copied');
- } catch (error) {
- expect(error).to.have.property('code', 'ENOENT');
- }
-
- expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n');
- expect(await read(resolve(destination, 'foo_dir/.bar'))).to.be('dotfile\n');
- });
-
- it('supports atime and mtime', async () => {
- const destination = resolve(TMP, 'a/b/c/d/e');
- const time = new Date(1425298511000);
- await copyAll(FIXTURES, destination, {
- time,
- });
- const barTxt = statSync(resolve(destination, 'foo_dir/bar.txt'));
- const fooDir = statSync(resolve(destination, 'foo_dir'));
-
- // precision is platform specific
- const oneDay = 86400000;
- expect(Math.abs(barTxt.atimeMs - time.getTime())).to.be.below(oneDay);
- expect(Math.abs(fooDir.atimeMs - time.getTime())).to.be.below(oneDay);
- expect(Math.abs(barTxt.mtimeMs - time.getTime())).to.be.below(oneDay);
- });
- });
-
- describe('getFileHash()', () => {
- it('rejects if path is not absolute', async () => {
- try {
- await getFileHash('foo/bar');
- throw new Error('Expected getFileHash() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('resolves with the sha1 hash of a file', async () => {
- expect(await getFileHash(BAR_TXT_PATH, 'sha1')).to.be(
- 'e242ed3bffccdf271b7fbaf34ed72d089537b42f'
- );
- });
- it('resolves with the sha256 hash of a file', async () => {
- expect(await getFileHash(BAR_TXT_PATH, 'sha256')).to.be(
- '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730'
- );
- });
- it('resolves with the md5 hash of a file', async () => {
- expect(await getFileHash(BAR_TXT_PATH, 'md5')).to.be('c157a79031e1c40f85931829bc5fc552');
- });
- });
-
- describe('untar()', () => {
- it('rejects if source path is not absolute', async () => {
- try {
- await untar('foo/bar', '**/*', __dirname);
- throw new Error('Expected untar() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('rejects if destination path is not absolute', async () => {
- try {
- await untar(__dirname, '**/*', 'foo/bar');
- throw new Error('Expected untar() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('rejects if neither path is not absolute', async () => {
- try {
- await untar('foo/bar', '**/*', 'foo/bar');
- throw new Error('Expected untar() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('extracts tarbar from source into destination, creating destination if necessary', async () => {
- const destination = resolve(TMP, 'a/b/c/d/e/f');
- await untar(FOO_TAR_PATH, destination);
- expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n');
- expect(await read(resolve(destination, 'foo_dir/foo/foo.txt'))).to.be('foo\n');
- });
-
- it('passed thrid argument to Extract class, overriding path with destination', async () => {
- const destination = resolve(TMP, 'a/b/c');
-
- await untar(FOO_TAR_PATH, destination, {
- path: '/dev/null',
- strip: 1,
- });
-
- expect(await read(resolve(destination, 'bar.txt'))).to.be('bar\n');
- expect(await read(resolve(destination, 'foo/foo.txt'))).to.be('foo\n');
- });
- });
-
- describe('gunzip()', () => {
- it('rejects if source path is not absolute', async () => {
- try {
- await gunzip('foo/bar', '**/*', __dirname);
- throw new Error('Expected gunzip() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('rejects if destination path is not absolute', async () => {
- try {
- await gunzip(__dirname, '**/*', 'foo/bar');
- throw new Error('Expected gunzip() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('rejects if neither path is not absolute', async () => {
- try {
- await gunzip('foo/bar', '**/*', 'foo/bar');
- throw new Error('Expected gunzip() to reject');
- } catch (error) {
- assertNonAbsoluteError(error);
- }
- });
-
- it('extracts gzip from source into destination, creating destination if necessary', async () => {
- const destination = resolve(TMP, 'z/y/x/v/u/t/foo.txt');
- await gunzip(FOO_GZIP_PATH, destination);
- expect(await read(resolve(destination))).to.be('foo\n');
- });
- });
-});
diff --git a/src/dev/build/lib/__tests__/platform.js b/src/dev/build/lib/__tests__/platform.js
deleted file mode 100644
index a7bb5670ee412..0000000000000
--- a/src/dev/build/lib/__tests__/platform.js
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import expect from '@kbn/expect';
-
-import { createPlatform } from '../platform';
-
-describe('src/dev/build/lib/platform', () => {
- describe('getName()', () => {
- it('returns the name argument', () => {
- expect(createPlatform('foo').getName()).to.be('foo');
- });
- });
-
- describe('getNodeArch()', () => {
- it('returns the node arch for the passed name', () => {
- expect(createPlatform('win32', 'x64').getNodeArch()).to.be('win32-x64');
- });
- });
-
- describe('getBuildName()', () => {
- it('returns the build name for the passed name', () => {
- expect(createPlatform('linux', 'arm64', 'linux-aarch64').getBuildName()).to.be(
- 'linux-aarch64'
- );
- });
- });
-
- describe('isWindows()', () => {
- it('returns true if name is win32', () => {
- expect(createPlatform('win32', 'x64').isWindows()).to.be(true);
- expect(createPlatform('linux', 'x64').isWindows()).to.be(false);
- expect(createPlatform('darwin', 'x64').isWindows()).to.be(false);
- });
- });
-
- describe('isLinux()', () => {
- it('returns true if name is linux', () => {
- expect(createPlatform('win32', 'x64').isLinux()).to.be(false);
- expect(createPlatform('linux', 'x64').isLinux()).to.be(true);
- expect(createPlatform('darwin', 'x64').isLinux()).to.be(false);
- });
- });
-
- describe('isMac()', () => {
- it('returns true if name is darwin', () => {
- expect(createPlatform('win32', 'x64').isMac()).to.be(false);
- expect(createPlatform('linux', 'x64').isMac()).to.be(false);
- expect(createPlatform('darwin', 'x64').isMac()).to.be(true);
- });
- });
-});
diff --git a/src/dev/build/lib/__tests__/runner.js b/src/dev/build/lib/__tests__/runner.js
deleted file mode 100644
index 314c2dd45d50f..0000000000000
--- a/src/dev/build/lib/__tests__/runner.js
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import sinon from 'sinon';
-import expect from '@kbn/expect';
-
-import { ToolingLog } from '@kbn/dev-utils';
-import { createRunner } from '../runner';
-import { isErrorLogged, markErrorLogged } from '../errors';
-
-describe('dev/build/lib/runner', () => {
- const sandbox = sinon.createSandbox();
-
- const config = {};
-
- const onLogLine = sandbox.stub();
- const log = new ToolingLog({
- level: 'verbose',
- writeTo: {
- write: onLogLine,
- },
- });
-
- const buildMatcher = sinon.match({
- isOss: sinon.match.func,
- resolvePath: sinon.match.func,
- resolvePathForPlatform: sinon.match.func,
- getPlatformArchivePath: sinon.match.func,
- getName: sinon.match.func,
- getLogTag: sinon.match.func,
- });
-
- const ossBuildMatcher = buildMatcher.and(sinon.match((b) => b.isOss(), 'is oss build'));
- const defaultBuildMatcher = buildMatcher.and(sinon.match((b) => !b.isOss(), 'is not oss build'));
-
- afterEach(() => sandbox.reset());
-
- describe('defaults', () => {
- const run = createRunner({
- config,
- log,
- });
-
- it('returns a promise', () => {
- expect(run({ run: sinon.stub() })).to.be.a(Promise);
- });
-
- it('runs global task once, passing config and log', async () => {
- const runTask = sinon.stub();
- await run({ global: true, run: runTask });
- sinon.assert.calledOnce(runTask);
- sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array);
- });
-
- it('does not call local tasks', async () => {
- const runTask = sinon.stub();
- await run({ run: runTask });
- sinon.assert.notCalled(runTask);
- });
- });
-
- describe('buildOssDist = true, buildDefaultDist = true', () => {
- const run = createRunner({
- config,
- log,
- buildOssDist: true,
- buildDefaultDist: true,
- });
-
- it('runs global task once, passing config and log', async () => {
- const runTask = sinon.stub();
- await run({ global: true, run: runTask });
- sinon.assert.calledOnce(runTask);
- sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array);
- });
-
- it('runs local tasks twice, passing config log and both builds', async () => {
- const runTask = sinon.stub();
- await run({ run: runTask });
- sinon.assert.calledTwice(runTask);
- sinon.assert.calledWithExactly(runTask, config, log, ossBuildMatcher);
- sinon.assert.calledWithExactly(runTask, config, log, defaultBuildMatcher);
- });
- });
-
- describe('just default dist', () => {
- const run = createRunner({
- config,
- log,
- buildDefaultDist: true,
- });
-
- it('runs global task once, passing config and log', async () => {
- const runTask = sinon.stub();
- await run({ global: true, run: runTask });
- sinon.assert.calledOnce(runTask);
- sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array);
- });
-
- it('runs local tasks once, passing config log and default build', async () => {
- const runTask = sinon.stub();
- await run({ run: runTask });
- sinon.assert.calledOnce(runTask);
- sinon.assert.calledWithExactly(runTask, config, log, defaultBuildMatcher);
- });
- });
-
- describe('just oss dist', () => {
- const run = createRunner({
- config,
- log,
- buildOssDist: true,
- });
-
- it('runs global task once, passing config and log', async () => {
- const runTask = sinon.stub();
- await run({ global: true, run: runTask });
- sinon.assert.calledOnce(runTask);
- sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array);
- });
-
- it('runs local tasks once, passing config log and oss build', async () => {
- const runTask = sinon.stub();
- await run({ run: runTask });
- sinon.assert.calledOnce(runTask);
- sinon.assert.calledWithExactly(runTask, config, log, ossBuildMatcher);
- });
- });
-
- describe('task rejects', () => {
- const run = createRunner({
- config,
- log,
- buildOssDist: true,
- });
-
- it('rejects, logs error, and marks error logged', async () => {
- try {
- await run({
- async run() {
- throw new Error('FOO');
- },
- });
- throw new Error('expected run() to reject');
- } catch (error) {
- expect(error).to.have.property('message').be('FOO');
- sinon.assert.calledWith(onLogLine, sinon.match(/FOO/));
- expect(isErrorLogged(error)).to.be(true);
- }
- });
-
- it('just rethrows errors that have already been logged', async () => {
- try {
- await run({
- async run() {
- throw markErrorLogged(new Error('FOO'));
- },
- });
-
- throw new Error('expected run() to reject');
- } catch (error) {
- expect(error).to.have.property('message').be('FOO');
- sinon.assert.neverCalledWith(onLogLine, sinon.match(/FOO/));
- expect(isErrorLogged(error)).to.be(true);
- }
- });
- });
-});
diff --git a/src/dev/build/lib/__tests__/version_info.js b/src/dev/build/lib/__tests__/version_info.js
deleted file mode 100644
index a7329642e4f9a..0000000000000
--- a/src/dev/build/lib/__tests__/version_info.js
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import expect from '@kbn/expect';
-
-import pkg from '../../../../../package.json';
-import { getVersionInfo } from '../version_info';
-
-describe('dev/build/lib/version_info', () => {
- describe('isRelease = true', () => {
- it('returns unchanged package.version, build sha, and build number', async () => {
- const versionInfo = await getVersionInfo({
- isRelease: true,
- pkg,
- });
-
- expect(versionInfo).to.have.property('buildVersion', pkg.version);
- expect(versionInfo)
- .to.have.property('buildSha')
- .match(/^[0-9a-f]{40}$/);
- expect(versionInfo).to.have.property('buildNumber').a('number').greaterThan(1000);
- });
- });
- describe('isRelease = false', () => {
- it('returns snapshot version, build sha, and build number', async () => {
- const versionInfo = await getVersionInfo({
- isRelease: false,
- pkg,
- });
-
- expect(versionInfo)
- .to.have.property('buildVersion')
- .contain(pkg.version)
- .match(/-SNAPSHOT$/);
- expect(versionInfo)
- .to.have.property('buildSha')
- .match(/^[0-9a-f]{40}$/);
- expect(versionInfo).to.have.property('buildNumber').a('number').greaterThan(1000);
- });
- });
-
- describe('versionQualifier', () => {
- it('appends a version qualifier', async () => {
- const versionInfo = await getVersionInfo({
- isRelease: true,
- versionQualifier: 'beta55',
- pkg,
- });
- expect(versionInfo)
- .to.have.property('buildVersion')
- .be(pkg.version + '-beta55');
- });
- });
-});
diff --git a/src/dev/build/lib/build.js b/src/dev/build/lib/build.js
deleted file mode 100644
index fe5111ad1377a..0000000000000
--- a/src/dev/build/lib/build.js
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import chalk from 'chalk';
-
-export function createBuild({ config, oss }) {
- const name = oss ? 'kibana-oss' : 'kibana';
- const logTag = oss ? chalk`{magenta [kibana-oss]}` : chalk`{cyan [ kibana ]}`;
-
- return new (class Build {
- isOss() {
- return !!oss;
- }
-
- resolvePath(...args) {
- return config.resolveFromRepo('build', name, ...args);
- }
-
- resolvePathForPlatform(platform, ...args) {
- return config.resolveFromRepo(
- 'build',
- oss ? 'oss' : 'default',
- `kibana-${config.getBuildVersion()}-${platform.getBuildName()}`,
- ...args
- );
- }
-
- getPlatformArchivePath(platform) {
- const ext = platform.isWindows() ? 'zip' : 'tar.gz';
- return config.resolveFromRepo(
- 'target',
- `${name}-${config.getBuildVersion()}-${platform.getBuildName()}.${ext}`
- );
- }
-
- getName() {
- return name;
- }
-
- getLogTag() {
- return logTag;
- }
- })();
-}
diff --git a/src/dev/build/lib/build.test.ts b/src/dev/build/lib/build.test.ts
new file mode 100644
index 0000000000000..9fdf21cee6567
--- /dev/null
+++ b/src/dev/build/lib/build.test.ts
@@ -0,0 +1,120 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { REPO_ROOT, createAbsolutePathSerializer } from '@kbn/dev-utils';
+
+import { Config } from './config';
+import { Build } from './build';
+
+expect.addSnapshotSerializer(createAbsolutePathSerializer());
+
+const config = new Config(
+ true,
+ {
+ version: '8.0.0',
+ engines: {
+ node: '*',
+ },
+ workspaces: {
+ packages: [],
+ },
+ },
+ '1.2.3',
+ REPO_ROOT,
+ {
+ buildNumber: 1234,
+ buildSha: 'abcd1234',
+ buildVersion: '8.0.0',
+ },
+ true
+);
+
+const linuxPlatform = config.getPlatform('linux', 'x64');
+const linuxArmPlatform = config.getPlatform('linux', 'arm64');
+const windowsPlatform = config.getPlatform('win32', 'x64');
+
+beforeEach(() => {
+ jest.clearAllMocks();
+});
+
+const ossBuild = new Build(config, true);
+const defaultBuild = new Build(config, false);
+
+describe('#isOss()', () => {
+ it('returns true for oss', () => {
+ expect(ossBuild.isOss()).toBe(true);
+ });
+
+ it('returns false for default build', () => {
+ expect(defaultBuild.isOss()).toBe(false);
+ });
+});
+
+describe('#getName()', () => {
+ it('returns kibana for default build', () => {
+ expect(defaultBuild.getName()).toBe('kibana');
+ });
+
+ it('returns kibana-oss for oss', () => {
+ expect(ossBuild.getName()).toBe('kibana-oss');
+ });
+});
+
+describe('#getLogTag()', () => {
+ it('returns string with build name in it', () => {
+ expect(defaultBuild.getLogTag()).toContain(defaultBuild.getName());
+ expect(ossBuild.getLogTag()).toContain(ossBuild.getName());
+ });
+});
+
+describe('#resolvePath()', () => {
+ it('uses passed config to resolve a path relative to the repo', () => {
+ expect(ossBuild.resolvePath('bar')).toMatchInlineSnapshot(
+      `<absolute path>/build/kibana-oss/bar`
+ );
+ });
+
+ it('passes all arguments to config.resolveFromRepo()', () => {
+ expect(defaultBuild.resolvePath('bar', 'baz', 'box')).toMatchInlineSnapshot(
+      `<absolute path>/build/kibana/bar/baz/box`
+ );
+ });
+});
+
+describe('#resolvePathForPlatform()', () => {
+ it('uses config.resolveFromRepo(), config.getBuildVersion(), and platform.getBuildName() to create path', () => {
+ expect(ossBuild.resolvePathForPlatform(linuxPlatform, 'foo', 'bar')).toMatchInlineSnapshot(
+      `<absolute path>/build/oss/kibana-8.0.0-linux-x86_64/foo/bar`
+ );
+ });
+});
+
+describe('#getPlatformArchivePath()', () => {
+ it('creates correct path for different platforms', () => {
+ expect(ossBuild.getPlatformArchivePath(linuxPlatform)).toMatchInlineSnapshot(
+      `<absolute path>/target/kibana-oss-8.0.0-linux-x86_64.tar.gz`
+ );
+      `<absolute path>/target/kibana-oss-8.0.0-linux-aarch64.tar.gz`
+ `/target/kibana-oss-8.0.0-linux-aarch64.tar.gz`
+ );
+      `<absolute path>/target/kibana-oss-8.0.0-windows-x86_64.zip`
+ `/target/kibana-oss-8.0.0-windows-x86_64.zip`
+ );
+ });
+});
diff --git a/src/dev/build/lib/build.ts b/src/dev/build/lib/build.ts
new file mode 100644
index 0000000000000..d0b03b4c5e4b2
--- /dev/null
+++ b/src/dev/build/lib/build.ts
@@ -0,0 +1,63 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import chalk from 'chalk';
+
+import { Config } from './config';
+import { Platform } from './platform';
+
+export class Build {
+ private name = this.oss ? 'kibana-oss' : 'kibana';
+ private logTag = this.oss ? chalk`{magenta [kibana-oss]}` : chalk`{cyan [ kibana ]}`;
+
+ constructor(private config: Config, private oss: boolean) {}
+
+ isOss() {
+ return !!this.oss;
+ }
+
+ resolvePath(...args: string[]) {
+ return this.config.resolveFromRepo('build', this.name, ...args);
+ }
+
+ resolvePathForPlatform(platform: Platform, ...args: string[]) {
+ return this.config.resolveFromRepo(
+ 'build',
+ this.oss ? 'oss' : 'default',
+ `kibana-${this.config.getBuildVersion()}-${platform.getBuildName()}`,
+ ...args
+ );
+ }
+
+ getPlatformArchivePath(platform: Platform) {
+ const ext = platform.isWindows() ? 'zip' : 'tar.gz';
+ return this.config.resolveFromRepo(
+ 'target',
+ `${this.name}-${this.config.getBuildVersion()}-${platform.getBuildName()}.${ext}`
+ );
+ }
+
+ getName() {
+ return this.name;
+ }
+
+ getLogTag() {
+ return this.logTag;
+ }
+}
diff --git a/src/dev/build/lib/config.js b/src/dev/build/lib/config.js
deleted file mode 100644
index 36621f1c2d4ac..0000000000000
--- a/src/dev/build/lib/config.js
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { dirname, resolve, relative } from 'path';
-import os from 'os';
-
-import { getVersionInfo } from './version_info';
-import { createPlatform } from './platform';
-
-export async function getConfig({ isRelease, targetAllPlatforms, versionQualifier }) {
- const pkgPath = resolve(__dirname, '../../../../package.json');
- const pkg = require(pkgPath); // eslint-disable-line import/no-dynamic-require
- const repoRoot = dirname(pkgPath);
- const nodeVersion = pkg.engines.node;
-
- const platforms = [
- createPlatform('linux', 'x64', 'linux-x86_64'),
- createPlatform('linux', 'arm64', 'linux-aarch64'),
- createPlatform('darwin', 'x64', 'darwin-x86_64'),
- createPlatform('win32', 'x64', 'windows-x86_64'),
- ];
-
- const versionInfo = await getVersionInfo({
- isRelease,
- versionQualifier,
- pkg,
- });
-
- return new (class Config {
- /**
- * Get Kibana's parsed package.json file
- * @return {Object}
- */
- getKibanaPkg() {
- return pkg;
- }
-
- isRelease() {
- return isRelease;
- }
-
- /**
- * Get the node version required by Kibana
- * @return {String}
- */
- getNodeVersion() {
- return nodeVersion;
- }
-
- /**
- * Convert an absolute path to a relative path, based from the repo
- * @param {String} absolutePath
- * @return {String}
- */
- getRepoRelativePath(absolutePath) {
- return relative(repoRoot, absolutePath);
- }
-
- /**
- * Resolve a set of relative paths based from the directory of the Kibana repo
- * @param {...String} ...subPaths
- * @return {String}
- */
- resolveFromRepo(...subPaths) {
- return resolve(repoRoot, ...subPaths);
- }
-
- /**
- * Return the list of Platforms we are targeting, if --this-platform flag is
- * specified only the platform for this OS will be returned
- * @return {Array}
- */
- getTargetPlatforms() {
- if (targetAllPlatforms) {
- return platforms;
- }
-
- return [this.getPlatformForThisOs()];
- }
-
- /**
- * Return the list of Platforms we need/have node downloads for. We always
- * include the linux platform even if we aren't targeting linux so we can
- * reliably get the LICENSE file, which isn't included in the windows version
- * @return {Array}
- */
- getNodePlatforms() {
- if (targetAllPlatforms) {
- return platforms;
- }
-
- if (process.platform === 'linux') {
- return [this.getPlatform('linux', 'x64')];
- }
-
- return [this.getPlatformForThisOs(), this.getPlatform('linux', 'x64')];
- }
-
- getPlatform(name, arch) {
- const selected = platforms.find((p) => {
- return name === p.getName() && arch === p.getArchitecture();
- });
-
- if (!selected) {
- throw new Error(`Unable to find platform (${name}) with architecture (${arch})`);
- }
-
- return selected;
- }
-
- /**
- * Get the platform object representing the OS on this machine
- * @return {Platform}
- */
- getPlatformForThisOs() {
- return this.getPlatform(os.platform(), os.arch());
- }
-
- /**
- * Get the version to use for this build
- * @return {String}
- */
- getBuildVersion() {
- return versionInfo.buildVersion;
- }
-
- /**
- * Get the build number of this build
- * @return {Number}
- */
- getBuildNumber() {
- return versionInfo.buildNumber;
- }
-
- /**
- * Get the git sha for this build
- * @return {String}
- */
- getBuildSha() {
- return versionInfo.buildSha;
- }
-
- /**
- * Resolve a set of paths based from the target directory for this build.
- * @param {...String} ...subPaths
- * @return {String}
- */
- resolveFromTarget(...subPaths) {
- return resolve(repoRoot, 'target', ...subPaths);
- }
- })();
-}
diff --git a/src/dev/build/lib/config.test.ts b/src/dev/build/lib/config.test.ts
new file mode 100644
index 0000000000000..0539adc840a6a
--- /dev/null
+++ b/src/dev/build/lib/config.test.ts
@@ -0,0 +1,201 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { resolve } from 'path';
+
+import { createAbsolutePathSerializer, REPO_ROOT } from '@kbn/dev-utils';
+
+import pkg from '../../../../package.json';
+import { Config } from './config';
+
+jest.mock('./version_info', () => ({
+ getVersionInfo: () => ({
+ buildSha: 'abc1234',
+ buildVersion: '8.0.0',
+ buildNumber: 1234,
+ }),
+}));
+
+const versionInfo = jest.requireMock('./version_info').getVersionInfo();
+
+expect.addSnapshotSerializer(createAbsolutePathSerializer());
+
+const setup = async ({ targetAllPlatforms = true }: { targetAllPlatforms?: boolean } = {}) => {
+ return await Config.create({
+ isRelease: true,
+ targetAllPlatforms,
+ });
+};
+
+describe('#getKibanaPkg()', () => {
+ it('returns the parsed package.json from the Kibana repo', async () => {
+ const config = await setup();
+ expect(config.getKibanaPkg()).toEqual(pkg);
+ });
+});
+
+describe('#getNodeVersion()', () => {
+ it('returns the node version from the kibana package.json', async () => {
+ const config = await setup();
+ expect(config.getNodeVersion()).toEqual(pkg.engines.node);
+ });
+});
+
+describe('#getRepoRelativePath()', () => {
+ it('converts an absolute path to relative path, from the root of the repo', async () => {
+ const config = await setup();
+ expect(config.getRepoRelativePath(__dirname)).toMatchInlineSnapshot(`"src/dev/build/lib"`);
+ });
+});
+
+describe('#resolveFromRepo()', () => {
+ it('resolves a relative path', async () => {
+ const config = await setup();
+ expect(config.resolveFromRepo('src/dev/build')).toMatchInlineSnapshot(
+      `<absolute path>/src/dev/build`
+ );
+ });
+
+ it('resolves a series of relative paths', async () => {
+ const config = await setup();
+ expect(config.resolveFromRepo('src', 'dev', 'build')).toMatchInlineSnapshot(
+      `<absolute path>/src/dev/build`
+ );
+ });
+});
+
+describe('#getPlatform()', () => {
+ it('throws error when platform does not exist', async () => {
+ const config = await setup();
+ expect(() => {
+ config.getPlatform(
+ // @ts-expect-error invalid platform name
+ 'foo',
+ 'x64'
+ );
+ }).toThrowErrorMatchingInlineSnapshot(
+ `"Unable to find platform (foo) with architecture (x64)"`
+ );
+ });
+
+ it('throws error when architecture does not exist', async () => {
+ const config = await setup();
+ expect(() => {
+ config.getPlatform(
+ 'linux',
+ // @ts-expect-error invalid platform arch
+ 'foo'
+ );
+ }).toThrowErrorMatchingInlineSnapshot(
+ `"Unable to find platform (linux) with architecture (foo)"`
+ );
+ });
+});
+
+describe('#getTargetPlatforms()', () => {
+ it('returns an array of all platform objects', async () => {
+ const config = await setup();
+ expect(
+ config
+ .getTargetPlatforms()
+ .map((p) => p.getNodeArch())
+ .sort()
+ ).toMatchInlineSnapshot(`
+ Array [
+ "darwin-x64",
+ "linux-arm64",
+ "linux-x64",
+ "win32-x64",
+ ]
+ `);
+ });
+
+ it('returns just this platform when targetAllPlatforms = false', async () => {
+ const config = await setup({
+ targetAllPlatforms: false,
+ });
+
+ expect(config.getTargetPlatforms()).toEqual([config.getPlatformForThisOs()]);
+ });
+});
+
+describe('#getNodePlatforms()', () => {
+ it('returns all platforms', async () => {
+ const config = await setup();
+ expect(
+ config
+ .getNodePlatforms()
+ .map((p) => p.getNodeArch())
+ .sort()
+ ).toEqual(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']);
+ });
+
+ it('returns this platform and linux, when targetAllPlatforms = false', async () => {
+ const config = await setup({
+ targetAllPlatforms: false,
+ });
+ const platforms = config.getNodePlatforms();
+ expect(platforms).toBeInstanceOf(Array);
+ if (process.platform !== 'linux') {
+ expect(platforms).toHaveLength(2);
+ expect(platforms[0]).toBe(config.getPlatformForThisOs());
+ expect(platforms[1]).toBe(config.getPlatform('linux', 'x64'));
+ } else {
+ expect(platforms).toHaveLength(1);
+ expect(platforms[0]).toBe(config.getPlatform('linux', 'x64'));
+ }
+ });
+});
+
+describe('#getPlatformForThisOs()', () => {
+ it('returns the platform that matches the arch of this machine', async () => {
+ const config = await setup();
+ const currentPlatform = config.getPlatformForThisOs();
+ expect(currentPlatform.getName()).toBe(process.platform);
+ expect(currentPlatform.getArchitecture()).toBe(process.arch);
+ });
+});
+
+describe('#getBuildVersion()', () => {
+ it('returns the version from the build info', async () => {
+ const config = await setup();
+ expect(config.getBuildVersion()).toBe(versionInfo.buildVersion);
+ });
+});
+
+describe('#getBuildNumber()', () => {
+ it('returns the number from the build info', async () => {
+ const config = await setup();
+ expect(config.getBuildNumber()).toBe(versionInfo.buildNumber);
+ });
+});
+
+describe('#getBuildSha()', () => {
+ it('returns the sha from the build info', async () => {
+ const config = await setup();
+ expect(config.getBuildSha()).toBe(versionInfo.buildSha);
+ });
+});
+
+describe('#resolveFromTarget()', () => {
+ it('resolves a relative path, from the target directory', async () => {
+ const config = await setup();
+ expect(config.resolveFromTarget()).toBe(resolve(REPO_ROOT, 'target'));
+ });
+});
diff --git a/src/dev/build/lib/config.ts b/src/dev/build/lib/config.ts
new file mode 100644
index 0000000000000..338c89b1930d8
--- /dev/null
+++ b/src/dev/build/lib/config.ts
@@ -0,0 +1,173 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { dirname, resolve, relative } from 'path';
+import os from 'os';
+import loadJsonFile from 'load-json-file';
+
+import { getVersionInfo, VersionInfo } from './version_info';
+import { PlatformName, PlatformArchitecture, ALL_PLATFORMS } from './platform';
+
+interface Options {
+ isRelease: boolean;
+ targetAllPlatforms: boolean;
+ versionQualifier?: string;
+}
+
+interface Package {
+ version: string;
+ engines: { node: string };
+ workspaces: {
+ packages: string[];
+ };
+ [key: string]: unknown;
+}
+
+export class Config {
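+ // reads the repo's package.json and resolves version info once, then constructs an immutable Config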
+ static async create({ isRelease, targetAllPlatforms, versionQualifier }: Options) {
+ const pkgPath = resolve(__dirname, '../../../../package.json');
+ const pkg: Package = loadJsonFile.sync(pkgPath);
+
+ return new Config(
+ targetAllPlatforms,
+ pkg,
+ pkg.engines.node,
+ dirname(pkgPath),
+ await getVersionInfo({
+ isRelease,
+ versionQualifier,
+ pkg,
+ }),
+ isRelease
+ );
+ }
+
+ constructor(
+ private readonly targetAllPlatforms: boolean,
+ private readonly pkg: Package,
+ private readonly nodeVersion: string,
+ private readonly repoRoot: string,
+ private readonly versionInfo: VersionInfo,
+ public readonly isRelease: boolean
+ ) {}
+
+ /**
+ * Get Kibana's parsed package.json file
+ */
+ getKibanaPkg() {
+ return this.pkg;
+ }
+
+ /**
+ * Get the node version required by Kibana
+ */
+ getNodeVersion() {
+ return this.nodeVersion;
+ }
+
+ /**
+ * Convert an absolute path to a path relative to the repo root
+ */
+ getRepoRelativePath(absolutePath: string) {
+ return relative(this.repoRoot, absolutePath);
+ }
+
+ /**
+ * Resolve a set of relative paths against the root of the Kibana repo
+ */
+ resolveFromRepo(...subPaths: string[]) {
+ return resolve(this.repoRoot, ...subPaths);
+ }
+
+ /**
+ * Return the list of Platforms we are targeting; if the --this-platform flag is
+ * specified, only the platform for this OS will be returned
+ */
+ getTargetPlatforms() {
+ if (this.targetAllPlatforms) {
+ return ALL_PLATFORMS;
+ }
+
+ return [this.getPlatformForThisOs()];
+ }
+
+ /**
+ * Return the list of Platforms we need/have node downloads for. We always
+ * include the linux platform even if we aren't targeting linux so we can
+ * reliably get the LICENSE file, which isn't included in the windows version
+ */
+ getNodePlatforms() {
+ if (this.targetAllPlatforms) {
+ return ALL_PLATFORMS;
+ }
+
+ if (process.platform === 'linux') {
+ return [this.getPlatform('linux', 'x64')];
+ }
+
+ return [this.getPlatformForThisOs(), this.getPlatform('linux', 'x64')];
+ }
+
+ getPlatform(name: PlatformName, arch: PlatformArchitecture) {
+ const selected = ALL_PLATFORMS.find((p) => {
+ return name === p.getName() && arch === p.getArchitecture();
+ });
+
+ if (!selected) {
+ throw new Error(`Unable to find platform (${name}) with architecture (${arch})`);
+ }
+
+ return selected;
+ }
+
+ /**
+ * Get the platform object representing the OS on this machine
+ */
+ getPlatformForThisOs() {
+ return this.getPlatform(os.platform() as PlatformName, os.arch() as PlatformArchitecture);
+ }
+
+ /**
+ * Get the version to use for this build
+ */
+ getBuildVersion() {
+ return this.versionInfo.buildVersion;
+ }
+
+ /**
+ * Get the build number of this build
+ */
+ getBuildNumber() {
+ return this.versionInfo.buildNumber;
+ }
+
+ /**
+ * Get the git sha for this build
+ */
+ getBuildSha() {
+ return this.versionInfo.buildSha;
+ }
+
+ /**
+ * Resolve a set of sub-paths against the target directory for this build.
+ */
+ resolveFromTarget(...subPaths: string[]) {
+ return resolve(this.repoRoot, 'target', ...subPaths);
+ }
+}
diff --git a/src/dev/build/lib/download.js b/src/dev/build/lib/download.ts
similarity index 81%
rename from src/dev/build/lib/download.js
rename to src/dev/build/lib/download.ts
index fbd2d47ff7b06..7c1618b833b45 100644
--- a/src/dev/build/lib/download.js
+++ b/src/dev/build/lib/download.ts
@@ -23,10 +23,15 @@ import { dirname } from 'path';
import chalk from 'chalk';
import { createHash } from 'crypto';
import Axios from 'axios';
+import { ToolingLog } from '@kbn/dev-utils';
+
+// https://github.com/axios/axios/tree/ffea03453f77a8176c51554d5f6c3c6829294649/lib/adapters
+// @ts-expect-error untyped internal module used to prevent axios from using xhr adapter in tests
+import AxiosHttpAdapter from 'axios/lib/adapters/http';
import { mkdirp } from './fs';
-function tryUnlink(path) {
+function tryUnlink(path: string) {
try {
unlinkSync(path);
} catch (error) {
@@ -36,7 +41,14 @@ function tryUnlink(path) {
}
}
-export async function download(options) {
+interface DownloadOptions {
+ log: ToolingLog;
+ url: string;
+ destination: string;
+ sha256: string;
+ retries?: number;
+}
+export async function download(options: DownloadOptions): Promise<void> {
const { log, url, destination, sha256, retries = 0 } = options;
if (!sha256) {
@@ -52,8 +64,9 @@ export async function download(options) {
log.debug(`Attempting download of ${url}`, chalk.dim(sha256));
const response = await Axios.request({
- url: url,
+ url,
responseType: 'stream',
+ adapter: AxiosHttpAdapter,
});
if (response.status !== 200) {
@@ -62,7 +75,7 @@ export async function download(options) {
const hash = createHash('sha256');
await new Promise((resolve, reject) => {
- response.data.on('data', (chunk) => {
+ response.data.on('data', (chunk: Buffer) => {
hash.update(chunk);
writeSync(fileHandle, chunk);
});
diff --git a/src/dev/build/lib/__tests__/errors.js b/src/dev/build/lib/errors.test.ts
similarity index 67%
rename from src/dev/build/lib/__tests__/errors.js
rename to src/dev/build/lib/errors.test.ts
index dc23b3e372bc6..0bf96463555fe 100644
--- a/src/dev/build/lib/__tests__/errors.js
+++ b/src/dev/build/lib/errors.test.ts
@@ -17,28 +17,26 @@
* under the License.
*/
-import expect from '@kbn/expect';
-
-import { isErrorLogged, markErrorLogged } from '../errors';
+import { isErrorLogged, markErrorLogged } from './errors';
describe('dev/build/lib/errors', () => {
describe('isErrorLogged()/markErrorLogged()', () => {
it('returns true if error has been passed to markErrorLogged()', () => {
const error = new Error();
- expect(isErrorLogged(error)).to.be(false);
+ expect(isErrorLogged(error)).toBe(false);
markErrorLogged(error);
- expect(isErrorLogged(error)).to.be(true);
+ expect(isErrorLogged(error)).toBe(true);
});
describe('isErrorLogged()', () => {
it('handles any value type', () => {
- expect(isErrorLogged(null)).to.be(false);
- expect(isErrorLogged(undefined)).to.be(false);
- expect(isErrorLogged(1)).to.be(false);
- expect(isErrorLogged([])).to.be(false);
- expect(isErrorLogged({})).to.be(false);
- expect(isErrorLogged(/foo/)).to.be(false);
- expect(isErrorLogged(new Date())).to.be(false);
+ expect(isErrorLogged(null)).toBe(false);
+ expect(isErrorLogged(undefined)).toBe(false);
+ expect(isErrorLogged(1)).toBe(false);
+ expect(isErrorLogged([])).toBe(false);
+ expect(isErrorLogged({})).toBe(false);
+ expect(isErrorLogged(/foo/)).toBe(false);
+ expect(isErrorLogged(new Date())).toBe(false);
});
});
});
diff --git a/src/dev/build/lib/errors.js b/src/dev/build/lib/errors.ts
similarity index 86%
rename from src/dev/build/lib/errors.js
rename to src/dev/build/lib/errors.ts
index 7fb8e2dc070d1..8405e9d29a033 100644
--- a/src/dev/build/lib/errors.js
+++ b/src/dev/build/lib/errors.ts
@@ -17,13 +17,13 @@
* under the License.
*/
-const loggedErrors = new WeakSet();
+const loggedErrors = new WeakSet<any>();
-export function markErrorLogged(error) {
+export function markErrorLogged<T>(error: T): T {
loggedErrors.add(error);
return error;
}
-export function isErrorLogged(error) {
+export function isErrorLogged(error: any) {
return loggedErrors.has(error);
}
diff --git a/src/dev/build/lib/exec.test.ts b/src/dev/build/lib/exec.test.ts
new file mode 100644
index 0000000000000..6f6ec4f26afbb
--- /dev/null
+++ b/src/dev/build/lib/exec.test.ts
@@ -0,0 +1,67 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import Path from 'path';
+
+import {
+ ToolingLog,
+ ToolingLogCollectingWriter,
+ createStripAnsiSerializer,
+ createRecursiveSerializer,
+} from '@kbn/dev-utils';
+
+import { exec } from './exec';
+
+const testWriter = new ToolingLogCollectingWriter();
+const log = new ToolingLog();
+log.setWriters([testWriter]);
+
+expect.addSnapshotSerializer(createStripAnsiSerializer());
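+// strip the directory of the node binary from output so snapshots don't depend on the local node install path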
+expect.addSnapshotSerializer(
+ createRecursiveSerializer(
+ (v) => v.includes(process.execPath),
+ (v) => v.split(Path.dirname(process.execPath)).join('')
+ )
+);
+
+beforeEach(() => {
+ testWriter.messages.length = 0;
+});
+
+it('executes a command, logs the command, and logs the output', async () => {
+ await exec(log, process.execPath, ['-e', 'console.log("hi")']);
+ expect(testWriter.messages).toMatchInlineSnapshot(`
+ Array [
+ " debg $ /node -e console.log(\\"hi\\")",
+ " debg hi",
+ ]
+ `);
+});
+
+it('logs using level: option', async () => {
+ await exec(log, process.execPath, ['-e', 'console.log("hi")'], {
+ level: 'info',
+ });
+ expect(testWriter.messages).toMatchInlineSnapshot(`
+ Array [
+ " info $ /node -e console.log(\\"hi\\")",
+ " info hi",
+ ]
+ `);
+});
diff --git a/src/dev/build/lib/exec.js b/src/dev/build/lib/exec.ts
similarity index 73%
rename from src/dev/build/lib/exec.js
rename to src/dev/build/lib/exec.ts
index 5e47500c72c5c..c3870230b8f31 100644
--- a/src/dev/build/lib/exec.js
+++ b/src/dev/build/lib/exec.ts
@@ -19,12 +19,23 @@
import execa from 'execa';
import chalk from 'chalk';
+import { ToolingLog, LogLevel } from '@kbn/dev-utils';
-import { watchStdioForLine } from '../../../legacy/utils';
+import { watchStdioForLine } from './watch_stdio_for_line';
-export async function exec(log, cmd, args, options = {}) {
- const { level = 'debug', cwd, env, exitAfter } = options;
+interface Options {
+ level?: Exclude<LogLevel, 'silent' | 'error'>;
+ cwd?: string;
+ env?: Record<string, string>;
+ exitAfter?: RegExp;
+}
+export async function exec(
+ log: ToolingLog,
+ cmd: string,
+ args: string[],
+ { level = 'debug', cwd, env, exitAfter }: Options = {}
+) {
log[level](chalk.dim('$'), cmd, ...args);
const proc = execa(cmd, args, {
diff --git a/src/dev/build/lib/fs.js b/src/dev/build/lib/fs.ts
similarity index 56%
rename from src/dev/build/lib/fs.js
rename to src/dev/build/lib/fs.ts
index b905f40d0de1e..d86901c41e436 100644
--- a/src/dev/build/lib/fs.js
+++ b/src/dev/build/lib/fs.ts
@@ -17,28 +17,31 @@
* under the License.
*/
-import archiver from 'archiver';
import fs from 'fs';
import { createHash } from 'crypto';
+import { pipeline, Writable } from 'stream';
import { resolve, dirname, isAbsolute, sep } from 'path';
import { createGunzip } from 'zlib';
-import { inspect } from 'util';
+import { inspect, promisify } from 'util';
+import archiver from 'archiver';
import vfs from 'vinyl-fs';
-import { promisify } from 'bluebird';
+import File from 'vinyl';
import del from 'del';
import deleteEmpty from 'delete-empty';
-import { createPromiseFromStreams, createMapStream } from '../../../legacy/utils';
-
-import tar from 'tar';
+import tar, { ExtractOptions } from 'tar';
+import { ToolingLog } from '@kbn/dev-utils';
+const pipelineAsync = promisify(pipeline);
const mkdirAsync = promisify(fs.mkdir);
const writeFileAsync = promisify(fs.writeFile);
const readFileAsync = promisify(fs.readFile);
const readdirAsync = promisify(fs.readdir);
const utimesAsync = promisify(fs.utimes);
+const copyFileAsync = promisify(fs.copyFile);
+const statAsync = promisify(fs.stat);
-export function assertAbsolute(path) {
+export function assertAbsolute(path: string) {
if (!isAbsolute(path)) {
throw new TypeError(
'Please use absolute paths to keep things explicit. You probably want to use `build.resolvePath()` or `config.resolveFromRepo()`.'
@@ -46,7 +49,7 @@ export function assertAbsolute(path) {
}
}
-export function isFileAccessible(path) {
+export function isFileAccessible(path: string) {
assertAbsolute(path);
try {
@@ -57,35 +60,35 @@ export function isFileAccessible(path) {
}
}
-function longInspect(value) {
+function longInspect(value: any) {
return inspect(value, {
maxArrayLength: Infinity,
});
}
-export async function mkdirp(path) {
+export async function mkdirp(path: string) {
assertAbsolute(path);
await mkdirAsync(path, { recursive: true });
}
-export async function write(path, contents) {
+export async function write(path: string, contents: string) {
assertAbsolute(path);
await mkdirp(dirname(path));
await writeFileAsync(path, contents);
}
-export async function read(path) {
+export async function read(path: string) {
assertAbsolute(path);
return await readFileAsync(path, 'utf8');
}
-export async function getChildPaths(path) {
+export async function getChildPaths(path: string) {
assertAbsolute(path);
const childNames = await readdirAsync(path);
return childNames.map((name) => resolve(path, name));
}
-export async function deleteAll(patterns, log) {
+export async function deleteAll(patterns: string[], log: ToolingLog) {
if (!Array.isArray(patterns)) {
throw new TypeError('Expected patterns to be an array');
}
@@ -108,7 +111,11 @@ export async function deleteAll(patterns, log) {
}
}
-export async function deleteEmptyFolders(log, rootFolderPath, foldersToKeep) {
+export async function deleteEmptyFolders(
+ log: ToolingLog,
+ rootFolderPath: string,
+ foldersToKeep: string[]
+) {
if (typeof rootFolderPath !== 'string') {
throw new TypeError('Expected root folder to be a string path');
}
@@ -121,7 +128,11 @@ export async function deleteEmptyFolders(log, rootFolderPath, foldersToKeep) {
// Delete empty is used to gather all the empty folders and
// then we use del to actually delete them
- const emptyFoldersList = await deleteEmpty(rootFolderPath, { dryRun: true });
+ const emptyFoldersList = await deleteEmpty(rootFolderPath, {
+ // @ts-expect-error DT package has incorrect types https://github.com/jonschlinkert/delete-empty/blob/6ae34547663e6845c3c98b184c606fa90ef79c0a/index.js#L160
+ dryRun: true,
+ });
+
const foldersToDelete = emptyFoldersList.filter((folderToDelete) => {
return !foldersToKeep.some((folderToKeep) => folderToDelete.includes(folderToKeep));
});
@@ -133,85 +144,153 @@ export async function deleteEmptyFolders(log, rootFolderPath, foldersToKeep) {
log.verbose('Deleted:', longInspect(deletedEmptyFolders));
}
-export async function copyAll(sourceDir, destination, options = {}) {
- const { select = ['**/*'], dot = false, time } = options;
+interface CopyOptions {
+ clone?: boolean;
+}
+export async function copy(source: string, destination: string, options: CopyOptions = {}) {
+ assertAbsolute(source);
+ assertAbsolute(destination);
+
+ // ensure source exists before creating destination directory and copying source
+ await statAsync(source);
+ await mkdirp(dirname(destination));
+ return await copyFileAsync(
+ source,
+ destination,
+ options.clone ? fs.constants.COPYFILE_FICLONE : 0
+ );
+}
+
+interface CopyAllOptions {
+ select?: string[];
+ dot?: boolean;
+ time?: string | number | Date;
+}
+
+export async function copyAll(
+ sourceDir: string,
+ destination: string,
+ options: CopyAllOptions = {}
+) {
+ const { select = ['**/*'], dot = false, time = Date.now() } = options;
assertAbsolute(sourceDir);
assertAbsolute(destination);
- await createPromiseFromStreams([
+ await pipelineAsync(
vfs.src(select, {
buffer: false,
cwd: sourceDir,
base: sourceDir,
dot,
}),
- vfs.dest(destination),
- ]);
+ vfs.dest(destination)
+ );
// we must update access and modified file times after the file copy
 // has completed, otherwise the copy action can affect the modification times.
if (Boolean(time)) {
- await createPromiseFromStreams([
+ await pipelineAsync(
vfs.src(select, {
buffer: false,
cwd: destination,
base: destination,
dot,
}),
- createMapStream((file) => utimesAsync(file.path, time, time)),
- ]);
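+ // replaces createMapStream: update atime/mtime of each copied file as it flows through the pipeline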
+ new Writable({
+ objectMode: true,
+ write(file: File, _, cb) {
+ utimesAsync(file.path, time, time).then(() => cb(), cb);
+ },
+ })
+ );
}
}
-export async function getFileHash(path, algo) {
+export async function getFileHash(path: string, algo: string) {
assertAbsolute(path);
const hash = createHash(algo);
const readStream = fs.createReadStream(path);
- await new Promise((resolve, reject) => {
+ await new Promise((res, rej) => {
readStream
.on('data', (chunk) => hash.update(chunk))
- .on('error', reject)
- .on('end', resolve);
+ .on('error', rej)
+ .on('end', res);
});
return hash.digest('hex');
}
-export async function untar(source, destination, extractOptions = {}) {
+export async function untar(
+ source: string,
+ destination: string,
+ extractOptions: ExtractOptions = {}
+) {
assertAbsolute(source);
assertAbsolute(destination);
await mkdirAsync(destination, { recursive: true });
- await createPromiseFromStreams([
+ await pipelineAsync(
fs.createReadStream(source),
createGunzip(),
tar.extract({
...extractOptions,
cwd: destination,
- }),
- ]);
+ })
+ );
}
-export async function gunzip(source, destination) {
+export async function gunzip(source: string, destination: string) {
assertAbsolute(source);
assertAbsolute(destination);
await mkdirAsync(dirname(destination), { recursive: true });
- await createPromiseFromStreams([
+ await pipelineAsync(
fs.createReadStream(source),
createGunzip(),
- fs.createWriteStream(destination),
- ]);
+ fs.createWriteStream(destination)
+ );
+}
+
+interface CompressTarOptions {
+ createRootDirectory: boolean;
+ source: string;
+ destination: string;
+ archiverOptions?: archiver.TarOptions & archiver.CoreOptions;
}
+export async function compressTar({
+ source,
+ destination,
+ archiverOptions,
+ createRootDirectory,
+}: CompressTarOptions) {
+ const output = fs.createWriteStream(destination);
+ const archive = archiver('tar', archiverOptions);
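+ // when createRootDirectory is set, use the source directory's basename as the root folder inside the archive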
+ const name = createRootDirectory ? source.split(sep).slice(-1)[0] : false;
+
+ archive.pipe(output);
-export async function compress(type, options = {}, source, destination) {
+ return archive.directory(source, name).finalize();
+}
+
+interface CompressZipOptions {
+ createRootDirectory: boolean;
+ source: string;
+ destination: string;
+ archiverOptions?: archiver.ZipOptions & archiver.CoreOptions;
+}
+export async function compressZip({
+ source,
+ destination,
+ archiverOptions,
+ createRootDirectory,
+}: CompressZipOptions) {
const output = fs.createWriteStream(destination);
- const archive = archiver(type, options.archiverOptions);
- const name = options.createRootDirectory ? source.split(sep).slice(-1)[0] : false;
+ const archive = archiver('zip', archiverOptions);
+ const name = createRootDirectory ? source.split(sep).slice(-1)[0] : false;
archive.pipe(output);
diff --git a/src/dev/build/lib/index.js b/src/dev/build/lib/index.js
deleted file mode 100644
index 6540db6f37a72..0000000000000
--- a/src/dev/build/lib/index.js
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export { getConfig } from './config';
-export { createRunner } from './runner';
-export { isErrorLogged } from './errors';
-export { exec } from './exec';
-export {
- read,
- write,
- mkdirp,
- copyAll,
- getFileHash,
- untar,
- gunzip,
- deleteAll,
- deleteEmptyFolders,
- compress,
- isFileAccessible,
-} from './fs';
-export { download } from './download';
-export { scanDelete } from './scan_delete';
-export { scanCopy } from './scan_copy';
diff --git a/src/dev/build/lib/index.ts b/src/dev/build/lib/index.ts
new file mode 100644
index 0000000000000..339dc41cc6ccf
--- /dev/null
+++ b/src/dev/build/lib/index.ts
@@ -0,0 +1,30 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export * from './config';
+export * from './build';
+export * from './runner';
+export * from './errors';
+export * from './exec';
+export * from './fs';
+export * from './download';
+export * from './scan_delete';
+export * from './scan_copy';
+export * from './platform';
+export * from './scan';
diff --git a/src/dev/build/lib/integration_tests/download.test.ts b/src/dev/build/lib/integration_tests/download.test.ts
new file mode 100644
index 0000000000000..a86d5292501f5
--- /dev/null
+++ b/src/dev/build/lib/integration_tests/download.test.ts
@@ -0,0 +1,226 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { createServer, IncomingMessage, ServerResponse } from 'http';
+import { join } from 'path';
+import { tmpdir } from 'os';
+import { readFileSync } from 'fs';
+
+import del from 'del';
+import { CI_PARALLEL_PROCESS_PREFIX } from '@kbn/test';
+import { ToolingLog } from '@kbn/dev-utils';
+
+import { mkdirp } from '../fs';
+import { download } from '../download';
+
+const TMP_DIR = join(tmpdir(), CI_PARALLEL_PROCESS_PREFIX, 'download-js-test-tmp-dir');
+const TMP_DESTINATION = join(TMP_DIR, '__tmp_download_js_test_file__');
+
+beforeEach(async () => {
+ await del(TMP_DIR, { force: true });
+ await mkdirp(TMP_DIR);
+ jest.clearAllMocks();
+});
+
+afterEach(async () => {
+ await del(TMP_DIR, { force: true });
+});
+
+const onLogLine = jest.fn();
+const log = new ToolingLog({
+ level: 'verbose',
+ writeTo: {
+ write: onLogLine,
+ },
+});
+
+type Handler = (req: IncomingMessage, res: ServerResponse) => void;
+
+const FOO_SHA256 = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';
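+// request handlers used by the tests below: createSendHandler responds 200 with the given body, sendErrorHandler responds 500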
+const createSendHandler = (send: any): Handler => (req, res) => {
+ res.statusCode = 200;
+ res.end(send);
+};
+const sendErrorHandler: Handler = (req, res) => {
+ res.statusCode = 500;
+ res.end();
+};
+
+let serverUrl: string;
+let nextHandler: Handler | null = null;
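+// each test installs a one-shot handler; the server consumes it for a single request, then falls back to the 500 handler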
+const server = createServer((req, res) => {
+ if (!nextHandler) {
+ nextHandler = sendErrorHandler;
+ }
+
+ const handler = nextHandler;
+ nextHandler = null;
+ handler(req, res);
+});
+
+afterEach(() => (nextHandler = null));
+
+beforeAll(async () => {
+ await Promise.race([
+ new Promise((_, reject) => {
+ server.once('error', reject);
+ }),
+ new Promise((resolve) => {
+ server.listen(resolve);
+ }),
+ ]);
+
+ // address is only a string when listening to a UNIX socket, and undefined when we haven't called listen() yet
+ const address = server.address() as { port: number };
+
+ serverUrl = `http://localhost:${address.port}/`;
+});
+
+afterAll(async () => {
+ server.close();
+});
+
+it('downloads from URL and checks that content matches sha256', async () => {
+ nextHandler = createSendHandler('foo');
+ await download({
+ log,
+ url: serverUrl,
+ destination: TMP_DESTINATION,
+ sha256: FOO_SHA256,
+ });
+ expect(readFileSync(TMP_DESTINATION, 'utf8')).toBe('foo');
+});
+
+it('rejects and deletes destination if sha256 does not match', async () => {
+ nextHandler = createSendHandler('foo');
+
+ try {
+ await download({
+ log,
+ url: serverUrl,
+ destination: TMP_DESTINATION,
+ sha256: 'bar',
+ });
+ throw new Error('Expected download() to reject');
+ } catch (error) {
+ expect(error).toHaveProperty(
+ 'message',
+ expect.stringContaining('does not match the expected sha256 checksum')
+ );
+ }
+
+ try {
+ readFileSync(TMP_DESTINATION);
+ throw new Error('Expected download to be deleted');
+ } catch (error) {
+ expect(error).toHaveProperty('code', 'ENOENT');
+ }
+});
+
+describe('retries download retries: number of times', () => {
+ it('resolves if retries = 1 and first attempt fails', async () => {
+ let reqCount = 0;
+ nextHandler = function sequenceHandler(req, res) {
+ switch (++reqCount) {
+ case 1:
+ nextHandler = sequenceHandler;
+ return sendErrorHandler(req, res);
+ default:
+ return createSendHandler('foo')(req, res);
+ }
+ };
+
+ await download({
+ log,
+ url: serverUrl,
+ destination: TMP_DESTINATION,
+ sha256: FOO_SHA256,
+ retries: 2,
+ });
+
+ expect(readFileSync(TMP_DESTINATION, 'utf8')).toBe('foo');
+ });
+
+ it('resolves if first fails, second is bad shasum, but third succeeds', async () => {
+ let reqCount = 0;
+ nextHandler = function sequenceHandler(req, res) {
+ switch (++reqCount) {
+ case 1:
+ nextHandler = sequenceHandler;
+ return sendErrorHandler(req, res);
+ case 2:
+ nextHandler = sequenceHandler;
+ return createSendHandler('bar')(req, res);
+ default:
+ return createSendHandler('foo')(req, res);
+ }
+ };
+
+ await download({
+ log,
+ url: serverUrl,
+ destination: TMP_DESTINATION,
+ sha256: FOO_SHA256,
+ retries: 2,
+ });
+ });
+
+ it('makes 6 requests if `retries: 5` and all failed', async () => {
+ let reqCount = 0;
+ nextHandler = function sequenceHandler(req, res) {
+ reqCount += 1;
+ nextHandler = sequenceHandler;
+ sendErrorHandler(req, res);
+ };
+
+ try {
+ await download({
+ log,
+ url: serverUrl,
+ destination: TMP_DESTINATION,
+ sha256: FOO_SHA256,
+ retries: 5,
+ });
+ throw new Error('Expected download() to reject');
+ } catch (error) {
+ expect(error).toHaveProperty(
+ 'message',
+ expect.stringContaining('Request failed with status code 500')
+ );
+ expect(reqCount).toBe(6);
+ }
+ });
+});
+
+describe('sha256 option not supplied', () => {
+ it('refuses to download', async () => {
+ try {
+ // @ts-expect-error missing sha256 param is intentional
+ await download({
+ log,
+ url: 'http://google.com',
+ destination: TMP_DESTINATION,
+ });
+
+ throw new Error('expected download() to reject');
+ } catch (error) {
+ expect(error).toHaveProperty('message', expect.stringContaining('refusing to download'));
+ }
+ });
+});
diff --git a/src/dev/build/lib/integration_tests/fs.test.ts b/src/dev/build/lib/integration_tests/fs.test.ts
new file mode 100644
index 0000000000000..e9ce09554159b
--- /dev/null
+++ b/src/dev/build/lib/integration_tests/fs.test.ts
@@ -0,0 +1,358 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { resolve } from 'path';
+import { chmodSync, statSync } from 'fs';
+
+import del from 'del';
+
+import { mkdirp, write, read, getChildPaths, copyAll, getFileHash, untar, gunzip } from '../fs';
+
+const TMP = resolve(__dirname, '../__tmp__');
+const FIXTURES = resolve(__dirname, '../__fixtures__');
+const FOO_TAR_PATH = resolve(FIXTURES, 'foo_dir.tar.gz');
+const FOO_GZIP_PATH = resolve(FIXTURES, 'foo.txt.gz');
+const BAR_TXT_PATH = resolve(FIXTURES, 'foo_dir/bar.txt');
+const WORLD_EXECUTABLE = resolve(FIXTURES, 'bin/world_executable');
+
+const isWindows = /^win/.test(process.platform);
+
+// get the mode of a file as a string, like 777 or 644
+function getCommonMode(path: string) {
+ return statSync(path).mode.toString(8).slice(-3);
+}
+
+function assertNonAbsoluteError(error: any) {
+ expect(error).toBeInstanceOf(Error);
+ expect(error.message).toContain('Please use absolute paths');
+}
+
+// ensure WORLD_EXECUTABLE is actually executable by all
+beforeAll(async () => {
+ chmodSync(WORLD_EXECUTABLE, 0o777);
+});
+
+// clean and recreate TMP directory
+beforeEach(async () => {
+ await del(TMP);
+ await mkdirp(TMP);
+});
+
+// cleanup TMP directory
+afterAll(async () => {
+ await del(TMP);
+});
+
+describe('mkdirp()', () => {
+ it('rejects if path is not absolute', async () => {
+ try {
+ await mkdirp('foo/bar');
+ throw new Error('Expected mkdirp() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('makes directory and necessary parent directories', async () => {
+ const destination = resolve(TMP, 'a/b/c/d/e/f/g');
+
+ expect(await mkdirp(destination)).toBe(undefined);
+
+ expect(statSync(destination).isDirectory()).toBe(true);
+ });
+});
+
+describe('write()', () => {
+ it('rejects if path is not absolute', async () => {
+ try {
+ // @ts-expect-error missing content intentional
+ await write('foo/bar');
+ throw new Error('Expected write() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('writes content to a file with existing parent directory', async () => {
+ const destination = resolve(TMP, 'a');
+
+ expect(await write(destination, 'bar')).toBe(undefined);
+ expect(await read(destination)).toBe('bar');
+ });
+
+ it('writes content to a file with missing parents', async () => {
+ const destination = resolve(TMP, 'a/b/c/d/e');
+
+ expect(await write(destination, 'bar')).toBe(undefined);
+ expect(await read(destination)).toBe('bar');
+ });
+});
+
+describe('read()', () => {
+ it('rejects if path is not absolute', async () => {
+ try {
+ await read('foo/bar');
+ throw new Error('Expected read() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('reads file, resolves with result', async () => {
+ expect(await read(BAR_TXT_PATH)).toBe('bar\n');
+ });
+});
+
+describe('getChildPaths()', () => {
+ it('rejects if path is not absolute', async () => {
+ try {
+ await getChildPaths('foo/bar');
+ throw new Error('Expected getChildPaths() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('resolves with absolute paths to the children of directory', async () => {
+ const path = resolve(FIXTURES, 'foo_dir');
+ expect((await getChildPaths(path)).sort()).toEqual([
+ resolve(FIXTURES, 'foo_dir/.bar'),
+ BAR_TXT_PATH,
+ resolve(FIXTURES, 'foo_dir/foo'),
+ ]);
+ });
+
+ it('rejects with ENOENT if path does not exist', async () => {
+ try {
+ await getChildPaths(resolve(FIXTURES, 'notrealpath'));
+ throw new Error('Expected getChildPaths() to reject');
+ } catch (error) {
+ expect(error).toHaveProperty('code', 'ENOENT');
+ }
+ });
+});
+
+describe('copyAll()', () => {
+ it('rejects if source path is not absolute', async () => {
+ try {
+ await copyAll('foo/bar', __dirname);
+ throw new Error('Expected copyAll() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('rejects if destination path is not absolute', async () => {
+ try {
+ await copyAll(__dirname, 'foo/bar');
+ throw new Error('Expected copyAll() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('rejects if neither path is absolute', async () => {
+ try {
+ await copyAll('foo/bar', 'foo/bar');
+ throw new Error('Expected copyAll() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('copies files and directories from source to dest, creating dest if necessary, respecting mode', async () => {
+ const destination = resolve(TMP, 'a/b/c');
+ await copyAll(FIXTURES, destination);
+
+ expect((await getChildPaths(resolve(destination, 'foo_dir'))).sort()).toEqual([
+ resolve(destination, 'foo_dir/bar.txt'),
+ resolve(destination, 'foo_dir/foo'),
+ ]);
+
+ expect(getCommonMode(resolve(destination, 'bin/world_executable'))).toBe(
+ isWindows ? '666' : '777'
+ );
+ expect(getCommonMode(resolve(destination, 'foo_dir/bar.txt'))).toBe(isWindows ? '666' : '644');
+ });
+
+ it('applies select globs if specified, ignores dot files', async () => {
+ const destination = resolve(TMP, 'a/b/c/d');
+ await copyAll(FIXTURES, destination, {
+ select: ['**/*bar*'],
+ });
+
+ try {
+ statSync(resolve(destination, 'bin/world_executable'));
+ throw new Error('expected bin/world_executable to not be copied');
+ } catch (error) {
+ expect(error).toHaveProperty('code', 'ENOENT');
+ }
+
+ try {
+ statSync(resolve(destination, 'foo_dir/.bar'));
+ throw new Error('expected foo_dir/.bar to not be copied');
+ } catch (error) {
+ expect(error).toHaveProperty('code', 'ENOENT');
+ }
+
+ expect(await read(resolve(destination, 'foo_dir/bar.txt'))).toBe('bar\n');
+ });
+
+ it('supports select globs and dot option together', async () => {
+ const destination = resolve(TMP, 'a/b/c/d');
+ await copyAll(FIXTURES, destination, {
+ select: ['**/*bar*'],
+ dot: true,
+ });
+
+ try {
+ statSync(resolve(destination, 'bin/world_executable'));
+ throw new Error('expected bin/world_executable to not be copied');
+ } catch (error) {
+ expect(error).toHaveProperty('code', 'ENOENT');
+ }
+
+ expect(await read(resolve(destination, 'foo_dir/bar.txt'))).toBe('bar\n');
+ expect(await read(resolve(destination, 'foo_dir/.bar'))).toBe('dotfile\n');
+ });
+
+ it('supports atime and mtime', async () => {
+ const destination = resolve(TMP, 'a/b/c/d/e');
+ const time = new Date(1425298511000);
+ await copyAll(FIXTURES, destination, {
+ time,
+ });
+ const barTxt = statSync(resolve(destination, 'foo_dir/bar.txt'));
+ const fooDir = statSync(resolve(destination, 'foo_dir'));
+
+ // precision is platform specific
+ const oneDay = 86400000;
+ expect(Math.abs(barTxt.atimeMs - time.getTime())).toBeLessThan(oneDay);
+ expect(Math.abs(fooDir.atimeMs - time.getTime())).toBeLessThan(oneDay);
+ expect(Math.abs(barTxt.mtimeMs - time.getTime())).toBeLessThan(oneDay);
+ });
+});
+
+describe('getFileHash()', () => {
+ it('rejects if path is not absolute', async () => {
+ try {
+ await getFileHash('foo/bar', 'some content');
+ throw new Error('Expected getFileHash() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('resolves with the sha1 hash of a file', async () => {
+ expect(await getFileHash(BAR_TXT_PATH, 'sha1')).toBe(
+ 'e242ed3bffccdf271b7fbaf34ed72d089537b42f'
+ );
+ });
+ it('resolves with the sha256 hash of a file', async () => {
+ expect(await getFileHash(BAR_TXT_PATH, 'sha256')).toBe(
+ '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730'
+ );
+ });
+ it('resolves with the md5 hash of a file', async () => {
+ expect(await getFileHash(BAR_TXT_PATH, 'md5')).toBe('c157a79031e1c40f85931829bc5fc552');
+ });
+});
+
+describe('untar()', () => {
+ it('rejects if source path is not absolute', async () => {
+ try {
+ await untar('foo/bar', '**/*');
+ throw new Error('Expected untar() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('rejects if destination path is not absolute', async () => {
+ try {
+ await untar(__dirname, '**/*');
+ throw new Error('Expected untar() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('rejects if neither path is absolute', async () => {
+ try {
+ await untar('foo/bar', '**/*');
+ throw new Error('Expected untar() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('extracts tarball from source into destination, creating destination if necessary', async () => {
+ const destination = resolve(TMP, 'a/b/c/d/e/f');
+ await untar(FOO_TAR_PATH, destination);
+ expect(await read(resolve(destination, 'foo_dir/bar.txt'))).toBe('bar\n');
+ expect(await read(resolve(destination, 'foo_dir/foo/foo.txt'))).toBe('foo\n');
+ });
+
+ it('passes third argument to Extract class, overriding path with destination', async () => {
+ const destination = resolve(TMP, 'a/b/c');
+
+ await untar(FOO_TAR_PATH, destination, {
+ path: '/dev/null',
+ strip: 1,
+ });
+
+ expect(await read(resolve(destination, 'bar.txt'))).toBe('bar\n');
+ expect(await read(resolve(destination, 'foo/foo.txt'))).toBe('foo\n');
+ });
+});
+
+describe('gunzip()', () => {
+ it('rejects if source path is not absolute', async () => {
+ try {
+ await gunzip('foo/bar', '**/*');
+ throw new Error('Expected gunzip() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('rejects if destination path is not absolute', async () => {
+ try {
+ await gunzip(__dirname, '**/*');
+ throw new Error('Expected gunzip() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('rejects if neither path is absolute', async () => {
+ try {
+ await gunzip('foo/bar', '**/*');
+ throw new Error('Expected gunzip() to reject');
+ } catch (error) {
+ assertNonAbsoluteError(error);
+ }
+ });
+
+ it('extracts gzip from source into destination, creating destination if necessary', async () => {
+ const destination = resolve(TMP, 'z/y/x/v/u/t/foo.txt');
+ await gunzip(FOO_GZIP_PATH, destination);
+ expect(await read(resolve(destination))).toBe('foo\n');
+ });
+});
diff --git a/src/dev/build/lib/scan_copy.test.ts b/src/dev/build/lib/integration_tests/scan_copy.test.ts
similarity index 94%
rename from src/dev/build/lib/scan_copy.test.ts
rename to src/dev/build/lib/integration_tests/scan_copy.test.ts
index ba693770445dc..f81951c575313 100644
--- a/src/dev/build/lib/scan_copy.test.ts
+++ b/src/dev/build/lib/integration_tests/scan_copy.test.ts
@@ -22,14 +22,13 @@ import { resolve } from 'path';
import del from 'del';
-// @ts-ignore
-import { getChildPaths, mkdirp, write } from './fs';
-import { scanCopy } from './scan_copy';
+import { getChildPaths } from '../fs';
+import { scanCopy } from '../scan_copy';
const IS_WINDOWS = process.platform === 'win32';
-const FIXTURES = resolve(__dirname, '__tests__/fixtures');
+const FIXTURES = resolve(__dirname, '../__fixtures__');
+const TMP = resolve(__dirname, '../__tmp__');
const WORLD_EXECUTABLE = resolve(FIXTURES, 'bin/world_executable');
-const TMP = resolve(__dirname, '__tests__/__tmp__');
const getCommonMode = (path: string) => statSync(path).mode.toString(8).slice(-3);
diff --git a/src/dev/build/lib/integration_tests/watch_stdio_for_line.test.ts b/src/dev/build/lib/integration_tests/watch_stdio_for_line.test.ts
new file mode 100644
index 0000000000000..007a3bc631c60
--- /dev/null
+++ b/src/dev/build/lib/integration_tests/watch_stdio_for_line.test.ts
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import execa from 'execa';
+
+import { watchStdioForLine } from '../watch_stdio_for_line';
+
+const onLogLine = jest.fn();
+
+beforeEach(() => {
+ jest.clearAllMocks();
+});
+
+it('calls logFn with log lines', async () => {
+ const proc = execa(process.execPath, ['-e', 'console.log("hi")']);
+ await watchStdioForLine(proc, onLogLine);
+ expect(onLogLine.mock.calls).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "hi",
+ ],
+ ]
+ `);
+});
+
+it('sends the proc SIGKILL if it logs a line matching exitAfter regexp', async function () {
+ const proc = execa(process.execPath, [require.resolve('../__fixtures__/log_on_sigint')]);
+ await watchStdioForLine(proc, onLogLine, /listening for SIGINT/);
+ expect(onLogLine.mock.calls).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "listening for SIGINT",
+ ],
+ ]
+ `);
+});
diff --git a/src/dev/build/lib/platform.js b/src/dev/build/lib/platform.js
deleted file mode 100644
index ab2672615e1c5..0000000000000
--- a/src/dev/build/lib/platform.js
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export function createPlatform(name, architecture, buildName) {
- return new (class Platform {
- getName() {
- return name;
- }
-
- getArchitecture() {
- return architecture;
- }
-
- getBuildName() {
- return buildName;
- }
-
- getNodeArch() {
- return `${name}-${architecture}`;
- }
-
- isWindows() {
- return name === 'win32';
- }
-
- isMac() {
- return name === 'darwin';
- }
-
- isLinux() {
- return name === 'linux';
- }
- })();
-}
diff --git a/src/dev/build/lib/platform.test.ts b/src/dev/build/lib/platform.test.ts
new file mode 100644
index 0000000000000..a93333c57e75e
--- /dev/null
+++ b/src/dev/build/lib/platform.test.ts
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { Platform } from './platform';
+
+describe('getName()', () => {
+ it('returns the name argument', () => {
+ expect(new Platform('win32', 'x64', 'foo').getName()).toBe('win32');
+ });
+});
+
+describe('getNodeArch()', () => {
+ it('returns the node arch for the passed name', () => {
+ expect(new Platform('win32', 'x64', 'foo').getNodeArch()).toBe('win32-x64');
+ });
+});
+
+describe('getBuildName()', () => {
+ it('returns the build name for the passed name', () => {
+ expect(new Platform('linux', 'arm64', 'linux-aarch64').getBuildName()).toBe('linux-aarch64');
+ });
+});
+
+describe('isWindows()', () => {
+ it('returns true if name is win32', () => {
+ expect(new Platform('win32', 'x64', 'foo').isWindows()).toBe(true);
+ expect(new Platform('linux', 'x64', 'foo').isWindows()).toBe(false);
+ expect(new Platform('darwin', 'x64', 'foo').isWindows()).toBe(false);
+ });
+});
+
+describe('isLinux()', () => {
+ it('returns true if name is linux', () => {
+ expect(new Platform('win32', 'x64', 'foo').isLinux()).toBe(false);
+ expect(new Platform('linux', 'x64', 'foo').isLinux()).toBe(true);
+ expect(new Platform('darwin', 'x64', 'foo').isLinux()).toBe(false);
+ });
+});
+
+describe('isMac()', () => {
+ it('returns true if name is darwin', () => {
+ expect(new Platform('win32', 'x64', 'foo').isMac()).toBe(false);
+ expect(new Platform('linux', 'x64', 'foo').isMac()).toBe(false);
+ expect(new Platform('darwin', 'x64', 'foo').isMac()).toBe(true);
+ });
+});
diff --git a/src/dev/build/lib/platform.ts b/src/dev/build/lib/platform.ts
new file mode 100644
index 0000000000000..f42c7eb7fba54
--- /dev/null
+++ b/src/dev/build/lib/platform.ts
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export type PlatformName = 'win32' | 'darwin' | 'linux';
+export type PlatformArchitecture = 'x64' | 'arm64';
+
+export class Platform {
+ constructor(
+ private name: PlatformName,
+ private architecture: PlatformArchitecture,
+ private buildName: string
+ ) {}
+
+ getName() {
+ return this.name;
+ }
+
+ getArchitecture() {
+ return this.architecture;
+ }
+
+ getBuildName() {
+ return this.buildName;
+ }
+
+ getNodeArch() {
+ return `${this.name}-${this.architecture}`;
+ }
+
+ isWindows() {
+ return this.name === 'win32';
+ }
+
+ isMac() {
+ return this.name === 'darwin';
+ }
+
+ isLinux() {
+ return this.name === 'linux';
+ }
+}
+
+export const ALL_PLATFORMS = [
+ new Platform('linux', 'x64', 'linux-x86_64'),
+ new Platform('linux', 'arm64', 'linux-aarch64'),
+ new Platform('darwin', 'x64', 'darwin-x86_64'),
+ new Platform('win32', 'x64', 'windows-x86_64'),
+];
diff --git a/src/dev/build/lib/runner.test.ts b/src/dev/build/lib/runner.test.ts
new file mode 100644
index 0000000000000..0e17f2f590e3d
--- /dev/null
+++ b/src/dev/build/lib/runner.test.ts
@@ -0,0 +1,248 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import {
+ ToolingLog,
+ ToolingLogCollectingWriter,
+ createStripAnsiSerializer,
+ createRecursiveSerializer,
+} from '@kbn/dev-utils';
+import { Config } from './config';
+import { createRunner } from './runner';
+import { Build } from './build';
+import { isErrorLogged, markErrorLogged } from './errors';
+
+jest.mock('./version_info');
+
+const testWriter = new ToolingLogCollectingWriter();
+const log = new ToolingLog();
+log.setWriters([testWriter]);
+
+expect.addSnapshotSerializer(createStripAnsiSerializer());
+
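+// collapse multi-line stack traces in captured log output so the inline snapshots stay stable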
+const STACK_TRACE = /(\│\s+)at .+ \(.+\)$/;
+const isStackTrace = (x: any) => typeof x === 'string' && STACK_TRACE.test(x);
+
+expect.addSnapshotSerializer(
+ createRecursiveSerializer(
+ (v) => Array.isArray(v) && v.some(isStackTrace),
+ (v) => {
+ const start = v.findIndex(isStackTrace);
+ v[start] = v[start].replace(STACK_TRACE, '$1');
+ while (isStackTrace(v[start + 1])) v.splice(start + 1, 1);
+ return v;
+ }
+ )
+);
+
+beforeEach(() => {
+ testWriter.messages.length = 0;
+ jest.clearAllMocks();
+});
+
+const setup = async (opts: { buildDefaultDist: boolean; buildOssDist: boolean }) => {
+ const config = await Config.create({
+ isRelease: true,
+ targetAllPlatforms: true,
+ versionQualifier: '-SNAPSHOT',
+ });
+
+ const run = createRunner({
+ config,
+ log,
+ ...opts,
+ });
+
+ return { config, run };
+};
+
+describe('buildOssDist = true, buildDefaultDist = true', () => {
+ it('runs global task once, passing config and log', async () => {
+ const { config, run } = await setup({
+ buildDefaultDist: true,
+ buildOssDist: true,
+ });
+
+ const mock = jest.fn();
+
+ await run({
+ global: true,
+ description: 'foo',
+ run: mock,
+ });
+
+ expect(mock).toHaveBeenCalledTimes(1);
+ expect(mock).toHaveBeenLastCalledWith(config, log, [expect.any(Build), expect.any(Build)]);
+ });
+
+ it('calls local tasks twice, passing each build', async () => {
+ const { config, run } = await setup({
+ buildDefaultDist: true,
+ buildOssDist: true,
+ });
+
+ const mock = jest.fn();
+
+ await run({
+ description: 'foo',
+ run: mock,
+ });
+
+ expect(mock).toHaveBeenCalledTimes(2);
+ expect(mock).toHaveBeenCalledWith(config, log, expect.any(Build));
+ });
+});
+
+describe('just default dist', () => {
+ it('runs global task once, passing config and log', async () => {
+ const { config, run } = await setup({
+ buildDefaultDist: true,
+ buildOssDist: false,
+ });
+
+ const mock = jest.fn();
+
+ await run({
+ global: true,
+ description: 'foo',
+ run: mock,
+ });
+
+ expect(mock).toHaveBeenCalledTimes(1);
+ expect(mock).toHaveBeenLastCalledWith(config, log, [expect.any(Build)]);
+ });
+
+ it('calls local tasks once, passing the default build', async () => {
+ const { config, run } = await setup({
+ buildDefaultDist: true,
+ buildOssDist: false,
+ });
+
+ const mock = jest.fn();
+
+ await run({
+ description: 'foo',
+ run: mock,
+ });
+
+ expect(mock).toHaveBeenCalledTimes(1);
+ expect(mock).toHaveBeenCalledWith(config, log, expect.any(Build));
+ const [args] = mock.mock.calls;
+ const [, , build] = args;
+ if (build.isOss()) {
+ throw new Error('expected build to be the default dist, not the oss dist');
+ }
+ });
+});
+
+describe('just oss dist', () => {
+ it('runs global task once, passing config and log', async () => {
+ const { config, run } = await setup({
+ buildDefaultDist: false,
+ buildOssDist: true,
+ });
+
+ const mock = jest.fn();
+
+ await run({
+ global: true,
+ description: 'foo',
+ run: mock,
+ });
+
+ expect(mock).toHaveBeenCalledTimes(1);
+ expect(mock).toHaveBeenLastCalledWith(config, log, [expect.any(Build)]);
+ });
+
+ it('calls local tasks once, passing the oss build', async () => {
+ const { config, run } = await setup({
+ buildDefaultDist: false,
+ buildOssDist: true,
+ });
+
+ const mock = jest.fn();
+
+ await run({
+ description: 'foo',
+ run: mock,
+ });
+
+ expect(mock).toHaveBeenCalledTimes(1);
+ expect(mock).toHaveBeenCalledWith(config, log, expect.any(Build));
+ const [args] = mock.mock.calls;
+ const [, , build] = args;
+ if (!build.isOss()) {
+ throw new Error('expected build to be the oss dist, not the default dist');
+ }
+ });
+});
+
+describe('task rejection', () => {
+ it('rejects, logs error, and marks error logged', async () => {
+ const { run } = await setup({
+ buildDefaultDist: true,
+ buildOssDist: false,
+ });
+
+ const error = new Error('FOO');
+ expect(isErrorLogged(error)).toBe(false);
+
+ const promise = run({
+ description: 'foo',
+ async run() {
+ throw error;
+ },
+ });
+
+ await expect(promise).rejects.toThrowErrorMatchingInlineSnapshot(`"FOO"`);
+ expect(testWriter.messages).toMatchInlineSnapshot(`
+ Array [
+ " info [ kibana ] foo",
+ " │ERROR failure 0 sec",
+ " │ERROR Error: FOO",
+ " │ ",
+ "",
+ ]
+ `);
+ expect(isErrorLogged(error)).toBe(true);
+ });
+
+ it('just rethrows errors that have already been logged', async () => {
+ const { run } = await setup({
+ buildDefaultDist: true,
+ buildOssDist: false,
+ });
+
+ const error = markErrorLogged(new Error('FOO'));
+ const promise = run({
+ description: 'foo',
+ async run() {
+ throw error;
+ },
+ });
+
+ await expect(promise).rejects.toThrowErrorMatchingInlineSnapshot(`"FOO"`);
+ expect(testWriter.messages).toMatchInlineSnapshot(`
+ Array [
+ " info [ kibana ] foo",
+ "",
+ ]
+ `);
+ });
+});
diff --git a/src/dev/build/lib/runner.js b/src/dev/build/lib/runner.ts
similarity index 72%
rename from src/dev/build/lib/runner.js
rename to src/dev/build/lib/runner.ts
index 363cfbe97afad..6b7d175bb229a 100644
--- a/src/dev/build/lib/runner.js
+++ b/src/dev/build/lib/runner.ts
@@ -18,13 +18,33 @@
*/
import chalk from 'chalk';
+import { ToolingLog } from '@kbn/dev-utils';
import { isErrorLogged, markErrorLogged } from './errors';
+import { Build } from './build';
+import { Config } from './config';
-import { createBuild } from './build';
+interface Options {
+ config: Config;
+ log: ToolingLog;
+ buildOssDist: boolean;
+ buildDefaultDist: boolean;
+}
+
+export interface GlobalTask {
+ global: true;
+ description: string;
+  run(config: Config, log: ToolingLog, builds: Build[]): Promise<void>;
+}
+
+export interface Task {
+ global?: false;
+ description: string;
+  run(config: Config, log: ToolingLog, build: Build): Promise<void>;
+}
-export function createRunner({ config, log, buildOssDist, buildDefaultDist }) {
- async function execTask(desc, task, ...args) {
+export function createRunner({ config, log, buildOssDist, buildDefaultDist }: Options) {
+ async function execTask(desc: string, task: Task | GlobalTask, lastArg: any) {
log.info(desc);
log.indent(4);
@@ -37,11 +57,11 @@ export function createRunner({ config, log, buildOssDist, buildDefaultDist }) {
};
try {
- await task.run(config, log, ...args);
+ await task.run(config, log, lastArg);
log.success(chalk.green('✓'), time());
} catch (error) {
if (!isErrorLogged(error)) {
- log.error('failure', time());
+ log.error(`failure ${time()}`);
log.error(error);
markErrorLogged(error);
}
@@ -53,22 +73,12 @@ export function createRunner({ config, log, buildOssDist, buildDefaultDist }) {
}
}
- const builds = [];
+ const builds: Build[] = [];
if (buildDefaultDist) {
- builds.push(
- createBuild({
- config,
- oss: false,
- })
- );
+ builds.push(new Build(config, false));
}
if (buildOssDist) {
- builds.push(
- createBuild({
- config,
- oss: true,
- })
- );
+ builds.push(new Build(config, true));
}
/**
@@ -76,11 +86,8 @@ export function createRunner({ config, log, buildOssDist, buildDefaultDist }) {
* `config`: an object with methods for determining top-level config values, see `./config.js`
* `log`: an instance of the `ToolingLog`, see `../../tooling_log/tooling_log.js`
 * `builds?`: If the task is not defined as `global: true` then it is called for each build and passed each one here.
- *
- * @param {Task} task
- * @return {Promise}
*/
- return async function run(task) {
+ return async function run(task: Task | GlobalTask) {
if (task.global) {
await execTask(chalk`{dim [ global ]} ${task.description}`, task, builds);
} else {
diff --git a/src/dev/build/lib/version_info.test.ts b/src/dev/build/lib/version_info.test.ts
new file mode 100644
index 0000000000000..1b0c71bf9220e
--- /dev/null
+++ b/src/dev/build/lib/version_info.test.ts
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import pkg from '../../../../package.json';
+import { getVersionInfo } from './version_info';
+
+describe('isRelease = true', () => {
+ it('returns unchanged package.version, build sha, and build number', async () => {
+ const versionInfo = await getVersionInfo({
+ isRelease: true,
+ pkg,
+ });
+
+ expect(versionInfo).toHaveProperty('buildVersion', pkg.version);
+ expect(versionInfo).toHaveProperty('buildSha', expect.stringMatching(/^[0-9a-f]{40}$/));
+ expect(versionInfo).toHaveProperty('buildNumber');
+ expect(versionInfo.buildNumber).toBeGreaterThan(1000);
+ });
+});
+
+describe('isRelease = false', () => {
+ it('returns snapshot version, build sha, and build number', async () => {
+ const versionInfo = await getVersionInfo({
+ isRelease: false,
+ pkg,
+ });
+
+ expect(versionInfo).toHaveProperty('buildVersion', expect.stringContaining(pkg.version));
+ expect(versionInfo).toHaveProperty('buildVersion', expect.stringMatching(/-SNAPSHOT$/));
+ expect(versionInfo).toHaveProperty('buildSha', expect.stringMatching(/^[0-9a-f]{40}$/));
+ expect(versionInfo).toHaveProperty('buildNumber');
+ expect(versionInfo.buildNumber).toBeGreaterThan(1000);
+ });
+});
+
+describe('versionQualifier', () => {
+ it('appends a version qualifier', async () => {
+ const versionInfo = await getVersionInfo({
+ isRelease: true,
+ versionQualifier: 'beta55',
+ pkg,
+ });
+
+ expect(versionInfo).toHaveProperty('buildVersion', pkg.version + '-beta55');
+ });
+});
diff --git a/src/dev/build/lib/version_info.js b/src/dev/build/lib/version_info.ts
similarity index 84%
rename from src/dev/build/lib/version_info.js
rename to src/dev/build/lib/version_info.ts
index 3a053afdbff8b..958112c524bac 100644
--- a/src/dev/build/lib/version_info.js
+++ b/src/dev/build/lib/version_info.ts
@@ -34,7 +34,19 @@ async function getBuildNumber() {
return parseFloat(wc.stdout.trim());
}
-export async function getVersionInfo({ isRelease, versionQualifier, pkg }) {
+interface Options {
+ isRelease: boolean;
+ versionQualifier?: string;
+ pkg: {
+ version: string;
+ };
+}
+
+type ResolvedType<T extends Promise<any>> = T extends Promise<infer X> ? X : never;
+
+export type VersionInfo = ResolvedType<ReturnType<typeof getVersionInfo>>;
+
+export async function getVersionInfo({ isRelease, versionQualifier, pkg }: Options) {
const buildVersion = pkg.version.concat(
versionQualifier ? `-${versionQualifier}` : '',
isRelease ? '' : '-SNAPSHOT'
diff --git a/src/legacy/utils/watch_stdio_for_line.js b/src/dev/build/lib/watch_stdio_for_line.ts
similarity index 83%
rename from src/legacy/utils/watch_stdio_for_line.js
rename to src/dev/build/lib/watch_stdio_for_line.ts
index 01323b4d4e967..2322d017abc61 100644
--- a/src/legacy/utils/watch_stdio_for_line.js
+++ b/src/dev/build/lib/watch_stdio_for_line.ts
@@ -18,8 +18,13 @@
*/
import { Transform } from 'stream';
+import { ExecaChildProcess } from 'execa';
-import { createPromiseFromStreams, createSplitStream, createMapStream } from './streams';
+import {
+ createPromiseFromStreams,
+ createSplitStream,
+ createMapStream,
+} from '../../../legacy/utils/streams';
// creates a stream that skips empty lines unless they are followed by
// another line, preventing the empty lines produced by splitStream
@@ -27,7 +32,7 @@ function skipLastEmptyLineStream() {
let skippedEmptyLine = false;
return new Transform({
objectMode: true,
- transform(line, enc, cb) {
+ transform(line, _, cb) {
if (skippedEmptyLine) {
this.push('');
skippedEmptyLine = false;
@@ -37,14 +42,18 @@ function skipLastEmptyLineStream() {
skippedEmptyLine = true;
return cb();
} else {
- return cb(null, line);
+ return cb(undefined, line);
}
},
});
}
-export async function watchStdioForLine(proc, logFn, exitAfter) {
- function onLogLine(line) {
+export async function watchStdioForLine(
+ proc: ExecaChildProcess,
+ logFn: (line: string) => void,
+ exitAfter?: RegExp
+) {
+ function onLogLine(line: string) {
logFn(line);
if (exitAfter && exitAfter.test(line)) {
diff --git a/src/dev/build/tasks/bin/copy_bin_scripts_task.js b/src/dev/build/tasks/bin/copy_bin_scripts_task.ts
similarity index 92%
rename from src/dev/build/tasks/bin/copy_bin_scripts_task.js
rename to src/dev/build/tasks/bin/copy_bin_scripts_task.ts
index f620f12b17d88..d0ef0a58eebd5 100644
--- a/src/dev/build/tasks/bin/copy_bin_scripts_task.js
+++ b/src/dev/build/tasks/bin/copy_bin_scripts_task.ts
@@ -17,9 +17,9 @@
* under the License.
*/
-import { copyAll } from '../../lib';
+import { copyAll, Task } from '../../lib';
-export const CopyBinScriptsTask = {
+export const CopyBinScripts: Task = {
description: 'Copying bin scripts into platform-generic build directory',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/bin/index.js b/src/dev/build/tasks/bin/index.ts
similarity index 92%
rename from src/dev/build/tasks/bin/index.js
rename to src/dev/build/tasks/bin/index.ts
index e970ac5ec044b..dc30f626decc4 100644
--- a/src/dev/build/tasks/bin/index.js
+++ b/src/dev/build/tasks/bin/index.ts
@@ -17,4 +17,4 @@
* under the License.
*/
-export { CopyBinScriptsTask } from './copy_bin_scripts_task';
+export * from './copy_bin_scripts_task';
diff --git a/src/dev/build/tasks/build_kibana_platform_plugins.js b/src/dev/build/tasks/build_kibana_platform_plugins.ts
similarity index 92%
rename from src/dev/build/tasks/build_kibana_platform_plugins.js
rename to src/dev/build/tasks/build_kibana_platform_plugins.ts
index 153a3120f896f..08637677fcfbe 100644
--- a/src/dev/build/tasks/build_kibana_platform_plugins.js
+++ b/src/dev/build/tasks/build_kibana_platform_plugins.ts
@@ -25,9 +25,11 @@ import {
reportOptimizerStats,
} from '@kbn/optimizer';
-export const BuildKibanaPlatformPluginsTask = {
+import { Task } from '../lib';
+
+export const BuildKibanaPlatformPlugins: Task = {
description: 'Building distributable versions of Kibana platform plugins',
- async run(_, log, build) {
+ async run(config, log, build) {
const optimizerConfig = OptimizerConfig.create({
repoRoot: build.resolvePath(),
cache: false,
diff --git a/src/dev/build/tasks/build_packages_task.js b/src/dev/build/tasks/build_packages_task.ts
similarity index 97%
rename from src/dev/build/tasks/build_packages_task.js
rename to src/dev/build/tasks/build_packages_task.ts
index b31855aa42dac..dd4e88f9c2b74 100644
--- a/src/dev/build/tasks/build_packages_task.js
+++ b/src/dev/build/tasks/build_packages_task.ts
@@ -18,7 +18,8 @@
*/
import { buildProductionProjects } from '@kbn/pm';
-import { mkdirp } from '../lib';
+
+import { mkdirp, Task } from '../lib';
/**
* High-level overview of how we enable shared packages in production:
@@ -66,8 +67,7 @@ import { mkdirp } from '../lib';
* in some way by Kibana itself in production, as it won't otherwise be
* included in the production build.
*/
-
-export const BuildPackagesTask = {
+export const BuildPackages: Task = {
description: 'Building distributable versions of packages',
async run(config, log, build) {
await mkdirp(config.resolveFromRepo('target'));
diff --git a/src/dev/build/tasks/clean_tasks.js b/src/dev/build/tasks/clean_tasks.ts
similarity index 92%
rename from src/dev/build/tasks/clean_tasks.js
rename to src/dev/build/tasks/clean_tasks.ts
index ff5c3b3a73dd3..b519b17e591a3 100644
--- a/src/dev/build/tasks/clean_tasks.js
+++ b/src/dev/build/tasks/clean_tasks.ts
@@ -19,9 +19,9 @@
import minimatch from 'minimatch';
-import { deleteAll, deleteEmptyFolders, scanDelete } from '../lib';
+import { deleteAll, deleteEmptyFolders, scanDelete, Task, GlobalTask } from '../lib';
-export const CleanTask = {
+export const Clean: GlobalTask = {
global: true,
description: 'Cleaning artifacts from previous builds',
@@ -37,7 +37,7 @@ export const CleanTask = {
},
};
-export const CleanPackagesTask = {
+export const CleanPackages: Task = {
description: 'Cleaning source for packages that are now installed in node_modules',
async run(config, log, build) {
@@ -45,7 +45,7 @@ export const CleanPackagesTask = {
},
};
-export const CleanTypescriptTask = {
+export const CleanTypescript: Task = {
description: 'Cleaning typescript source files that have been transpiled to JS',
async run(config, log, build) {
@@ -59,11 +59,11 @@ export const CleanTypescriptTask = {
},
};
-export const CleanExtraFilesFromModulesTask = {
+export const CleanExtraFilesFromModules: Task = {
description: 'Cleaning tests, examples, docs, etc. from node_modules',
async run(config, log, build) {
- const makeRegexps = (patterns) =>
+ const makeRegexps = (patterns: string[]) =>
patterns.map((pattern) => minimatch.makeRe(pattern, { nocase: true }));
const regularExpressions = makeRegexps([
@@ -181,7 +181,7 @@ export const CleanExtraFilesFromModulesTask = {
},
};
-export const CleanExtraBinScriptsTask = {
+export const CleanExtraBinScripts: Task = {
description: 'Cleaning extra bin/* scripts from platform-specific builds',
async run(config, log, build) {
@@ -201,7 +201,7 @@ export const CleanExtraBinScriptsTask = {
},
};
-export const CleanEmptyFoldersTask = {
+export const CleanEmptyFolders: Task = {
description: 'Cleaning all empty folders recursively',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/copy_source_task.js b/src/dev/build/tasks/copy_source_task.ts
similarity index 95%
rename from src/dev/build/tasks/copy_source_task.js
rename to src/dev/build/tasks/copy_source_task.ts
index 52809449ba338..221c9162bd2a9 100644
--- a/src/dev/build/tasks/copy_source_task.js
+++ b/src/dev/build/tasks/copy_source_task.ts
@@ -17,9 +17,9 @@
* under the License.
*/
-import { copyAll } from '../lib';
+import { copyAll, Task } from '../lib';
-export const CopySourceTask = {
+export const CopySource: Task = {
description: 'Copying source into platform-generic build directory',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/create_archives_sources_task.js b/src/dev/build/tasks/create_archives_sources_task.ts
similarity index 95%
rename from src/dev/build/tasks/create_archives_sources_task.js
rename to src/dev/build/tasks/create_archives_sources_task.ts
index 76f08bd3d2e4f..72f875b431933 100644
--- a/src/dev/build/tasks/create_archives_sources_task.js
+++ b/src/dev/build/tasks/create_archives_sources_task.ts
@@ -17,10 +17,10 @@
* under the License.
*/
-import { scanCopy } from '../lib';
+import { scanCopy, Task } from '../lib';
import { getNodeDownloadInfo } from './nodejs';
-export const CreateArchivesSourcesTask = {
+export const CreateArchivesSources: Task = {
description: 'Creating platform-specific archive source directories',
async run(config, log, build) {
await Promise.all(
diff --git a/src/dev/build/tasks/create_archives_task.js b/src/dev/build/tasks/create_archives_task.ts
similarity index 80%
rename from src/dev/build/tasks/create_archives_task.js
rename to src/dev/build/tasks/create_archives_task.ts
index 541b9551dbc9b..3ffb1afef7469 100644
--- a/src/dev/build/tasks/create_archives_task.js
+++ b/src/dev/build/tasks/create_archives_task.ts
@@ -23,11 +23,11 @@ import { promisify } from 'util';
import { CiStatsReporter } from '@kbn/dev-utils';
-import { mkdirp, compress } from '../lib';
+import { mkdirp, compressTar, compressZip, Task } from '../lib';
const asyncStat = promisify(Fs.stat);
-export const CreateArchivesTask = {
+export const CreateArchives: Task = {
description: 'Creating the archives for each platform',
async run(config, log, build) {
@@ -49,19 +49,16 @@ export const CreateArchivesTask = {
path: destination,
});
- await compress(
- 'zip',
- {
- archiverOptions: {
- zlib: {
- level: 9,
- },
+ await compressZip({
+ source,
+ destination,
+ archiverOptions: {
+ zlib: {
+ level: 9,
},
- createRootDirectory: true,
},
- source,
- destination
- );
+ createRootDirectory: true,
+ });
break;
case '.gz':
@@ -70,20 +67,17 @@ export const CreateArchivesTask = {
path: destination,
});
- await compress(
- 'tar',
- {
- archiverOptions: {
- gzip: true,
- gzipOptions: {
- level: 9,
- },
+ await compressTar({
+ source,
+ destination,
+ archiverOptions: {
+ gzip: true,
+ gzipOptions: {
+ level: 9,
},
- createRootDirectory: true,
},
- source,
- destination
- );
+ createRootDirectory: true,
+ });
break;
default:
diff --git a/src/dev/build/tasks/create_empty_dirs_and_files_task.js b/src/dev/build/tasks/create_empty_dirs_and_files_task.ts
similarity index 92%
rename from src/dev/build/tasks/create_empty_dirs_and_files_task.js
rename to src/dev/build/tasks/create_empty_dirs_and_files_task.ts
index 6bf059ca9519b..a72c6a4598338 100644
--- a/src/dev/build/tasks/create_empty_dirs_and_files_task.js
+++ b/src/dev/build/tasks/create_empty_dirs_and_files_task.ts
@@ -17,9 +17,9 @@
* under the License.
*/
-import { mkdirp, write } from '../lib';
+import { mkdirp, write, Task } from '../lib';
-export const CreateEmptyDirsAndFilesTask = {
+export const CreateEmptyDirsAndFiles: Task = {
description: 'Creating some empty directories and files to prevent file-permission issues',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/create_package_json_task.js b/src/dev/build/tasks/create_package_json_task.ts
similarity index 92%
rename from src/dev/build/tasks/create_package_json_task.js
rename to src/dev/build/tasks/create_package_json_task.ts
index e7a410b4c6350..5d7fdb9eae2f0 100644
--- a/src/dev/build/tasks/create_package_json_task.js
+++ b/src/dev/build/tasks/create_package_json_task.ts
@@ -19,9 +19,9 @@
import { copyWorkspacePackages } from '@kbn/pm';
-import { read, write } from '../lib';
+import { read, write, Task } from '../lib';
-export const CreatePackageJsonTask = {
+export const CreatePackageJson: Task = {
description: 'Creating build-ready version of package.json',
async run(config, log, build) {
@@ -38,7 +38,7 @@ export const CreatePackageJsonTask = {
number: config.getBuildNumber(),
sha: config.getBuildSha(),
distributable: true,
- release: config.isRelease(),
+ release: config.isRelease,
},
repository: pkg.repository,
engines: {
@@ -59,7 +59,7 @@ export const CreatePackageJsonTask = {
},
};
-export const RemovePackageJsonDepsTask = {
+export const RemovePackageJsonDeps: Task = {
description: 'Removing dependencies from package.json',
async run(config, log, build) {
@@ -74,7 +74,7 @@ export const RemovePackageJsonDepsTask = {
},
};
-export const RemoveWorkspacesTask = {
+export const RemoveWorkspaces: Task = {
description: 'Remove workspace artifacts',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/create_readme_task.js b/src/dev/build/tasks/create_readme_task.ts
similarity index 93%
rename from src/dev/build/tasks/create_readme_task.js
rename to src/dev/build/tasks/create_readme_task.ts
index 8d60dad9b5633..379ca45f43e26 100644
--- a/src/dev/build/tasks/create_readme_task.js
+++ b/src/dev/build/tasks/create_readme_task.ts
@@ -17,9 +17,9 @@
* under the License.
*/
-import { write, read } from '../lib';
+import { write, read, Task } from '../lib';
-export const CreateReadmeTask = {
+export const CreateReadme: Task = {
description: 'Creating README.md file',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/index.js b/src/dev/build/tasks/index.ts
similarity index 92%
rename from src/dev/build/tasks/index.js
rename to src/dev/build/tasks/index.ts
index 0a3a67313d6a4..4c00e56faee6b 100644
--- a/src/dev/build/tasks/index.js
+++ b/src/dev/build/tasks/index.ts
@@ -27,7 +27,6 @@ export * from './create_archives_task';
export * from './create_empty_dirs_and_files_task';
export * from './create_package_json_task';
export * from './create_readme_task';
-export * from './install_chromium';
export * from './install_dependencies_task';
export * from './license_file_task';
export * from './nodejs';
@@ -41,3 +40,6 @@ export * from './transpile_scss_task';
export * from './uuid_verification_task';
export * from './verify_env_task';
export * from './write_sha_sums_task';
+
+// @ts-expect-error this module can't be TS because it ends up pulling x-pack into Kibana
+export { InstallChromium } from './install_chromium';
diff --git a/src/dev/build/tasks/install_chromium.js b/src/dev/build/tasks/install_chromium.js
index c5878b23d43ae..3ae36d1615ccd 100644
--- a/src/dev/build/tasks/install_chromium.js
+++ b/src/dev/build/tasks/install_chromium.js
@@ -17,11 +17,12 @@
* under the License.
*/
+import { first } from 'rxjs/operators';
+
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { installBrowser } from '../../../../x-pack/plugins/reporting/server/browsers/install';
-import { first } from 'rxjs/operators';
-export const InstallChromiumTask = {
+export const InstallChromium = {
description: 'Installing Chromium',
async run(config, log, build) {
@@ -32,6 +33,7 @@ export const InstallChromiumTask = {
log.info(`Installing Chromium for ${platform.getName()}-${platform.getArchitecture()}`);
const { binaryPath$ } = installBrowser(
+ // TODO: https://github.com/elastic/kibana/issues/72496
log,
build.resolvePathForPlatform(platform, 'x-pack/plugins/reporting/chromium'),
platform.getName(),
diff --git a/src/dev/build/tasks/install_dependencies_task.js b/src/dev/build/tasks/install_dependencies_task.ts
similarity index 94%
rename from src/dev/build/tasks/install_dependencies_task.js
rename to src/dev/build/tasks/install_dependencies_task.ts
index 5191899cd94d0..32fd23859456e 100644
--- a/src/dev/build/tasks/install_dependencies_task.js
+++ b/src/dev/build/tasks/install_dependencies_task.ts
@@ -19,7 +19,9 @@
import { Project } from '@kbn/pm';
-export const InstallDependenciesTask = {
+import { Task } from '../lib';
+
+export const InstallDependencies: Task = {
description: 'Installing node_modules, including production builds of packages',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/license_file_task.js b/src/dev/build/tasks/license_file_task.ts
similarity index 94%
rename from src/dev/build/tasks/license_file_task.js
rename to src/dev/build/tasks/license_file_task.ts
index 1a7c70738aa47..f1b65501d076f 100644
--- a/src/dev/build/tasks/license_file_task.js
+++ b/src/dev/build/tasks/license_file_task.ts
@@ -17,9 +17,9 @@
* under the License.
*/
-import { write, read } from '../lib';
+import { write, read, Task } from '../lib';
-export const UpdateLicenseFileTask = {
+export const UpdateLicenseFile: Task = {
description: 'Updating LICENSE.txt file',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/nodejs/__tests__/download_node_builds_task.js b/src/dev/build/tasks/nodejs/__tests__/download_node_builds_task.js
deleted file mode 100644
index c1764d06b43b3..0000000000000
--- a/src/dev/build/tasks/nodejs/__tests__/download_node_builds_task.js
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import sinon from 'sinon';
-import expect from '@kbn/expect';
-
-import * as NodeShasumsNS from '../node_shasums';
-import * as NodeDownloadInfoNS from '../node_download_info';
-import * as DownloadNS from '../../../lib/download'; // sinon can't stub '../../../lib' properly
-import { DownloadNodeBuildsTask } from '../download_node_builds_task';
-
-describe('src/dev/build/tasks/nodejs/download_node_builds_task', () => {
- const sandbox = sinon.createSandbox();
- afterEach(() => {
- sandbox.restore();
- });
-
- function setup({ failOnUrl } = {}) {
- const platforms = [{ getName: () => 'foo' }, { getName: () => 'bar' }];
-
- const log = {};
- const config = {
- getNodePlatforms: () => platforms,
- getNodeVersion: () => 'nodeVersion',
- };
-
- sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').callsFake((config, platform) => {
- return {
- url: `${platform.getName()}:url`,
- downloadPath: `${platform.getName()}:downloadPath`,
- downloadName: `${platform.getName()}:downloadName`,
- };
- });
-
- sandbox.stub(NodeShasumsNS, 'getNodeShasums').returns({
- 'foo:downloadName': 'foo:sha256',
- 'bar:downloadName': 'bar:sha256',
- });
-
- sandbox.stub(DownloadNS, 'download').callsFake(({ url }) => {
- if (url === failOnUrl) {
- throw new Error('Download failed for reasons');
- }
- });
-
- return { log, config };
- }
-
- it('downloads node builds for each platform', async () => {
- const { log, config } = setup();
-
- await DownloadNodeBuildsTask.run(config, log);
-
- sinon.assert.calledTwice(DownloadNS.download);
- sinon.assert.calledWithExactly(DownloadNS.download, {
- log,
- url: 'foo:url',
- sha256: 'foo:sha256',
- destination: 'foo:downloadPath',
- retries: 3,
- });
- sinon.assert.calledWithExactly(DownloadNS.download, {
- log,
- url: 'bar:url',
- sha256: 'bar:sha256',
- destination: 'bar:downloadPath',
- retries: 3,
- });
- });
-
- it('rejects if any download fails', async () => {
- const { config, log } = setup({ failOnUrl: 'foo:url' });
-
- try {
- await DownloadNodeBuildsTask.run(config, log);
- throw new Error('Expected DownloadNodeBuildsTask to reject');
- } catch (error) {
- expect(error).to.have.property('message').be('Download failed for reasons');
- }
- });
-});
diff --git a/src/dev/build/tasks/nodejs/__tests__/extract_node_builds_task.js b/src/dev/build/tasks/nodejs/__tests__/extract_node_builds_task.js
deleted file mode 100644
index efb7aaa3a2209..0000000000000
--- a/src/dev/build/tasks/nodejs/__tests__/extract_node_builds_task.js
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import sinon from 'sinon';
-import { resolve } from 'path';
-import * as NodeDownloadInfoNS from '../node_download_info';
-import * as FsNS from '../../../lib/fs';
-import { ExtractNodeBuildsTask } from '../extract_node_builds_task';
-
-describe('src/dev/build/tasks/node_extract_node_builds_task', () => {
- const sandbox = sinon.createSandbox();
- afterEach(() => {
- sandbox.restore();
- });
-
- it('copies downloadPath to extractDir/node.exe for windows platform', async () => {
- sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').returns({
- downloadPath: 'downloadPath',
- extractDir: 'extractDir',
- });
-
- sandbox.stub(ExtractNodeBuildsTask, 'copyWindows');
- sandbox.stub(FsNS, 'untar');
-
- const platform = {
- isWindows: () => true,
- };
-
- const config = {
- getNodePlatforms: () => [platform],
- };
-
- await ExtractNodeBuildsTask.run(config);
-
- sinon.assert.calledOnce(NodeDownloadInfoNS.getNodeDownloadInfo);
- sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platform);
-
- sinon.assert.calledOnce(ExtractNodeBuildsTask.copyWindows);
- sinon.assert.calledWithExactly(
- ExtractNodeBuildsTask.copyWindows,
- 'downloadPath',
- resolve('extractDir/node.exe')
- );
-
- sinon.assert.notCalled(FsNS.untar);
- });
-
- it('untars downloadPath to extractDir, stripping the top level of the archive, for non-windows platforms', async () => {
- sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').returns({
- downloadPath: 'downloadPath',
- extractDir: 'extractDir',
- });
-
- sandbox.stub(ExtractNodeBuildsTask, 'copyWindows');
- sandbox.stub(FsNS, 'untar');
-
- const platform = {
- isWindows: () => false,
- };
-
- const config = {
- getNodePlatforms: () => [platform],
- };
-
- await ExtractNodeBuildsTask.run(config);
-
- sinon.assert.calledOnce(NodeDownloadInfoNS.getNodeDownloadInfo);
- sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platform);
-
- sinon.assert.notCalled(ExtractNodeBuildsTask.copyWindows);
-
- sinon.assert.calledOnce(FsNS.untar);
- sinon.assert.calledWithExactly(FsNS.untar, 'downloadPath', 'extractDir', {
- strip: 1,
- });
- });
-});
diff --git a/src/dev/build/tasks/nodejs/__tests__/verify_existing_node_builds_task.js b/src/dev/build/tasks/nodejs/__tests__/verify_existing_node_builds_task.js
deleted file mode 100644
index a8f732a869d2d..0000000000000
--- a/src/dev/build/tasks/nodejs/__tests__/verify_existing_node_builds_task.js
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import sinon from 'sinon';
-import expect from '@kbn/expect';
-
-import * as NodeShasumsNS from '../node_shasums';
-import * as NodeDownloadInfoNS from '../node_download_info';
-import * as FsNS from '../../../lib/fs';
-import { VerifyExistingNodeBuildsTask } from '../verify_existing_node_builds_task';
-
-describe('src/dev/build/tasks/nodejs/verify_existing_node_builds_task', () => {
- const sandbox = sinon.createSandbox();
- afterEach(() => {
- sandbox.restore();
- });
-
- function setup({ nodeShasums } = {}) {
- const platforms = [
- { getName: () => 'foo', getNodeArch: () => 'foo:nodeArch' },
- { getName: () => 'bar', getNodeArch: () => 'bar:nodeArch' },
- ];
-
- const log = { success: sinon.stub() };
- const config = {
- getNodePlatforms: () => platforms,
- getNodeVersion: () => 'nodeVersion',
- };
-
- sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').callsFake((config, platform) => {
- return {
- url: `${platform.getName()}:url`,
- downloadPath: `${platform.getName()}:downloadPath`,
- downloadName: `${platform.getName()}:downloadName`,
- };
- });
-
- sandbox.stub(NodeShasumsNS, 'getNodeShasums').returns(
- nodeShasums || {
- 'foo:downloadName': 'foo:sha256',
- 'bar:downloadName': 'bar:sha256',
- }
- );
-
- sandbox.stub(FsNS, 'getFileHash').callsFake((path) => {
- switch (path) {
- case 'foo:downloadPath':
- return 'foo:sha256';
- case 'bar:downloadPath':
- return 'bar:sha256';
- }
- });
-
- return { log, config, platforms };
- }
-
- it('downloads node builds for each platform', async () => {
- const { log, config, platforms } = setup();
-
- await VerifyExistingNodeBuildsTask.run(config, log);
-
- sinon.assert.calledOnce(NodeShasumsNS.getNodeShasums);
-
- sinon.assert.calledTwice(NodeDownloadInfoNS.getNodeDownloadInfo);
- sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platforms[0]);
- sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platforms[1]);
-
- sinon.assert.calledTwice(FsNS.getFileHash);
- sinon.assert.calledWithExactly(FsNS.getFileHash, 'foo:downloadPath', 'sha256');
- sinon.assert.calledWithExactly(FsNS.getFileHash, 'bar:downloadPath', 'sha256');
- });
-
- it('rejects if any download has an incorrect sha256', async () => {
- const { config, log } = setup({
- nodeShasums: {
- 'foo:downloadName': 'foo:sha256',
- 'bar:downloadName': 'bar:invalid',
- },
- });
-
- try {
- await VerifyExistingNodeBuildsTask.run(config, log);
- throw new Error('Expected VerifyExistingNodeBuildsTask to reject');
- } catch (error) {
- expect(error)
- .to.have.property('message')
- .be('Download at bar:downloadPath does not match expected checksum bar:sha256');
- }
- });
-});
diff --git a/src/dev/build/tasks/nodejs/clean_node_builds_task.js b/src/dev/build/tasks/nodejs/clean_node_builds_task.ts
similarity index 93%
rename from src/dev/build/tasks/nodejs/clean_node_builds_task.js
rename to src/dev/build/tasks/nodejs/clean_node_builds_task.ts
index a34e65a394115..9deeb9f73de28 100644
--- a/src/dev/build/tasks/nodejs/clean_node_builds_task.js
+++ b/src/dev/build/tasks/nodejs/clean_node_builds_task.ts
@@ -17,9 +17,9 @@
* under the License.
*/
-import { deleteAll } from '../../lib';
+import { deleteAll, Task } from '../../lib';
-export const CleanNodeBuildsTask = {
+export const CleanNodeBuilds: Task = {
description: 'Cleaning npm from node',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/nodejs/download_node_builds_task.test.ts b/src/dev/build/tasks/nodejs/download_node_builds_task.test.ts
new file mode 100644
index 0000000000000..6f08c8aa69750
--- /dev/null
+++ b/src/dev/build/tasks/nodejs/download_node_builds_task.test.ts
@@ -0,0 +1,136 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import {
+ ToolingLog,
+ ToolingLogCollectingWriter,
+ createAnyInstanceSerializer,
+} from '@kbn/dev-utils';
+
+import { Config, Platform } from '../../lib';
+import { DownloadNodeBuilds } from './download_node_builds_task';
+
+// import * as NodeShasumsNS from '../node_shasums';
+// import * as NodeDownloadInfoNS from '../node_download_info';
+// import * as DownloadNS from '../../../lib/download';
+// import { DownloadNodeBuilds } from '../download_node_builds_task';
+jest.mock('./node_shasums');
+jest.mock('./node_download_info');
+jest.mock('../../lib/download');
+
+expect.addSnapshotSerializer(createAnyInstanceSerializer(ToolingLog));
+
+const { getNodeDownloadInfo } = jest.requireMock('./node_download_info');
+const { getNodeShasums } = jest.requireMock('./node_shasums');
+const { download } = jest.requireMock('../../lib/download');
+
+const log = new ToolingLog();
+const testWriter = new ToolingLogCollectingWriter();
+log.setWriters([testWriter]);
+
+beforeEach(() => {
+ testWriter.messages.length = 0;
+ jest.clearAllMocks();
+});
+
+async function setup({ failOnUrl }: { failOnUrl?: string } = {}) {
+ const config = await Config.create({
+ isRelease: true,
+ targetAllPlatforms: true,
+ });
+
+ getNodeDownloadInfo.mockImplementation((_: Config, platform: Platform) => {
+ return {
+ url: `${platform.getName()}:url`,
+ downloadPath: `${platform.getName()}:downloadPath`,
+ downloadName: `${platform.getName()}:downloadName`,
+ };
+ });
+
+ getNodeShasums.mockReturnValue({
+ 'linux:downloadName': 'linux:sha256',
+ 'darwin:downloadName': 'darwin:sha256',
+ 'win32:downloadName': 'win32:sha256',
+ });
+
+ download.mockImplementation(({ url }: any) => {
+ if (url === failOnUrl) {
+ throw new Error('Download failed for reasons');
+ }
+ });
+
+ return { config };
+}
+
+it('downloads node builds for each platform', async () => {
+ const { config } = await setup();
+
+ await DownloadNodeBuilds.run(config, log, []);
+
+ expect(download.mock.calls).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ Object {
+ "destination": "linux:downloadPath",
+          "log": <ToolingLog>,
+ "retries": 3,
+ "sha256": "linux:sha256",
+ "url": "linux:url",
+ },
+ ],
+ Array [
+ Object {
+ "destination": "linux:downloadPath",
+          "log": <ToolingLog>,
+ "retries": 3,
+ "sha256": "linux:sha256",
+ "url": "linux:url",
+ },
+ ],
+ Array [
+ Object {
+ "destination": "darwin:downloadPath",
+          "log": <ToolingLog>,
+ "retries": 3,
+ "sha256": "darwin:sha256",
+ "url": "darwin:url",
+ },
+ ],
+ Array [
+ Object {
+ "destination": "win32:downloadPath",
+          "log": <ToolingLog>,
+ "retries": 3,
+ "sha256": "win32:sha256",
+ "url": "win32:url",
+ },
+ ],
+ ]
+ `);
+ expect(testWriter.messages).toMatchInlineSnapshot(`Array []`);
+});
+
+it('rejects if any download fails', async () => {
+ const { config } = await setup({ failOnUrl: 'linux:url' });
+
+ await expect(DownloadNodeBuilds.run(config, log, [])).rejects.toMatchInlineSnapshot(
+ `[Error: Download failed for reasons]`
+ );
+ expect(testWriter.messages).toMatchInlineSnapshot(`Array []`);
+});
diff --git a/src/dev/build/tasks/nodejs/download_node_builds_task.js b/src/dev/build/tasks/nodejs/download_node_builds_task.ts
similarity index 93%
rename from src/dev/build/tasks/nodejs/download_node_builds_task.js
rename to src/dev/build/tasks/nodejs/download_node_builds_task.ts
index c0907e6c42a97..ad42ea11436f5 100644
--- a/src/dev/build/tasks/nodejs/download_node_builds_task.js
+++ b/src/dev/build/tasks/nodejs/download_node_builds_task.ts
@@ -17,11 +17,11 @@
* under the License.
*/
-import { download } from '../../lib';
+import { download, GlobalTask } from '../../lib';
import { getNodeShasums } from './node_shasums';
import { getNodeDownloadInfo } from './node_download_info';
-export const DownloadNodeBuildsTask = {
+export const DownloadNodeBuilds: GlobalTask = {
global: true,
description: 'Downloading node.js builds for all platforms',
async run(config, log) {
diff --git a/src/dev/build/tasks/nodejs/extract_node_builds_task.test.ts b/src/dev/build/tasks/nodejs/extract_node_builds_task.test.ts
new file mode 100644
index 0000000000000..94c421f7c9a62
--- /dev/null
+++ b/src/dev/build/tasks/nodejs/extract_node_builds_task.test.ts
@@ -0,0 +1,108 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import {
+ ToolingLog,
+ ToolingLogCollectingWriter,
+ createAbsolutePathSerializer,
+} from '@kbn/dev-utils';
+
+import { Config } from '../../lib';
+import { ExtractNodeBuilds } from './extract_node_builds_task';
+
+jest.mock('../../lib/fs');
+
+const Fs = jest.requireMock('../../lib/fs');
+
+const log = new ToolingLog();
+const testWriter = new ToolingLogCollectingWriter();
+log.setWriters([testWriter]);
+
+expect.addSnapshotSerializer(createAbsolutePathSerializer());
+
+async function setup() {
+ const config = await Config.create({
+ isRelease: true,
+ targetAllPlatforms: true,
+ });
+
+ return { config };
+}
+
+beforeEach(() => {
+ testWriter.messages.length = 0;
+ jest.clearAllMocks();
+});
+
+it('runs expected fs operations', async () => {
+ const { config } = await setup();
+
+ await ExtractNodeBuilds.run(config, log, []);
+
+ const usedMethods = Object.fromEntries(
+ Object.entries(Fs)
+ .filter((entry): entry is [string, jest.Mock] => {
+ const [, mock] = entry;
+
+ if (typeof mock !== 'function') {
+ return false;
+ }
+
+ return (mock as jest.Mock).mock.calls.length > 0;
+ })
+ .map(([name, mock]) => [name, mock.mock.calls])
+ );
+
+ expect(usedMethods).toMatchInlineSnapshot(`
+ Object {
+ "copy": Array [
+ Array [
+          <absolute path>/.node_binaries/10.21.0/node.exe,
+          <absolute path>/.node_binaries/10.21.0/win32-x64/node.exe,
+ Object {
+ "clone": true,
+ },
+ ],
+ ],
+ "untar": Array [
+ Array [
+          <absolute path>/.node_binaries/10.21.0/node-v10.21.0-linux-x64.tar.gz,
+          <absolute path>/.node_binaries/10.21.0/linux-x64,
+ Object {
+ "strip": 1,
+ },
+ ],
+ Array [
+          <absolute path>/.node_binaries/10.21.0/node-v10.21.0-linux-arm64.tar.gz,
+          <absolute path>/.node_binaries/10.21.0/linux-arm64,
+ Object {
+ "strip": 1,
+ },
+ ],
+ Array [
+          <absolute path>/.node_binaries/10.21.0/node-v10.21.0-darwin-x64.tar.gz,
+          <absolute path>/.node_binaries/10.21.0/darwin-x64,
+ Object {
+ "strip": 1,
+ },
+ ],
+ ],
+ }
+ `);
+});
diff --git a/src/dev/build/tasks/nodejs/extract_node_builds_task.js b/src/dev/build/tasks/nodejs/extract_node_builds_task.ts
similarity index 56%
rename from src/dev/build/tasks/nodejs/extract_node_builds_task.js
rename to src/dev/build/tasks/nodejs/extract_node_builds_task.ts
index caf0a389b4cc0..aaa3312c8ba3f 100644
--- a/src/dev/build/tasks/nodejs/extract_node_builds_task.js
+++ b/src/dev/build/tasks/nodejs/extract_node_builds_task.ts
@@ -17,39 +17,27 @@
* under the License.
*/
-import { dirname, resolve } from 'path';
-import fs from 'fs';
-import { promisify } from 'util';
+import Path from 'path';
-import { untar, mkdirp } from '../../lib';
+import { untar, GlobalTask, copy } from '../../lib';
import { getNodeDownloadInfo } from './node_download_info';
-const statAsync = promisify(fs.stat);
-const copyFileAsync = promisify(fs.copyFile);
-
-export const ExtractNodeBuildsTask = {
+export const ExtractNodeBuilds: GlobalTask = {
global: true,
description: 'Extracting node.js builds for all platforms',
async run(config) {
await Promise.all(
config.getNodePlatforms().map(async (platform) => {
const { downloadPath, extractDir } = getNodeDownloadInfo(config, platform);
- // windows executable is not extractable, it's just an .exe file
if (platform.isWindows()) {
- const destination = resolve(extractDir, 'node.exe');
- return this.copyWindows(downloadPath, destination);
+ // windows executable is not extractable, it's just an .exe file
+ await copy(downloadPath, Path.resolve(extractDir, 'node.exe'), {
+ clone: true,
+ });
+ } else {
+ await untar(downloadPath, extractDir, { strip: 1 });
}
-
- // all other downloads are tarballs
- return untar(downloadPath, extractDir, { strip: 1 });
})
);
},
- async copyWindows(source, destination) {
- // ensure source exists before creating destination directory
- await statAsync(source);
- await mkdirp(dirname(destination));
- // for performance reasons, do a copy-on-write by using the fs.constants.COPYFILE_FICLONE flag
- return await copyFileAsync(source, destination, fs.constants.COPYFILE_FICLONE);
- },
};
diff --git a/src/dev/build/tasks/nodejs/index.js b/src/dev/build/tasks/nodejs/index.js
deleted file mode 100644
index e52dba73e4a96..0000000000000
--- a/src/dev/build/tasks/nodejs/index.js
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export { getNodeDownloadInfo } from './node_download_info';
-
-export { DownloadNodeBuildsTask } from './download_node_builds_task';
-export { ExtractNodeBuildsTask } from './extract_node_builds_task';
-export { VerifyExistingNodeBuildsTask } from './verify_existing_node_builds_task';
-export { CleanNodeBuildsTask } from './clean_node_builds_task';
diff --git a/src/dev/build/tasks/nodejs/index.ts b/src/dev/build/tasks/nodejs/index.ts
new file mode 100644
index 0000000000000..8dd65418fb445
--- /dev/null
+++ b/src/dev/build/tasks/nodejs/index.ts
@@ -0,0 +1,24 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export * from './node_download_info';
+export * from './download_node_builds_task';
+export * from './extract_node_builds_task';
+export * from './verify_existing_node_builds_task';
+export * from './clean_node_builds_task';
diff --git a/src/dev/build/tasks/nodejs/node_download_info.js b/src/dev/build/tasks/nodejs/node_download_info.ts
similarity index 92%
rename from src/dev/build/tasks/nodejs/node_download_info.js
rename to src/dev/build/tasks/nodejs/node_download_info.ts
index 33ffd042d85a3..b2c62d6667fd4 100644
--- a/src/dev/build/tasks/nodejs/node_download_info.js
+++ b/src/dev/build/tasks/nodejs/node_download_info.ts
@@ -19,7 +19,9 @@
import { basename } from 'path';
-export function getNodeDownloadInfo(config, platform) {
+import { Config, Platform } from '../../lib';
+
+export function getNodeDownloadInfo(config: Config, platform: Platform) {
const version = config.getNodeVersion();
const arch = platform.getNodeArch();
diff --git a/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.test.ts b/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.test.ts
new file mode 100644
index 0000000000000..f24b7ffc59c14
--- /dev/null
+++ b/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.test.ts
@@ -0,0 +1,225 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import {
+ ToolingLog,
+ ToolingLogCollectingWriter,
+ createAnyInstanceSerializer,
+} from '@kbn/dev-utils';
+
+import { Config, Platform } from '../../lib';
+import { VerifyExistingNodeBuilds } from './verify_existing_node_builds_task';
+
+jest.mock('./node_shasums');
+jest.mock('./node_download_info');
+jest.mock('../../lib/fs');
+
+const { getNodeShasums } = jest.requireMock('./node_shasums');
+const { getNodeDownloadInfo } = jest.requireMock('./node_download_info');
+const { getFileHash } = jest.requireMock('../../lib/fs');
+
+const log = new ToolingLog();
+const testWriter = new ToolingLogCollectingWriter();
+log.setWriters([testWriter]);
+
+expect.addSnapshotSerializer(createAnyInstanceSerializer(Config));
+
+async function setup(actualShaSums?: Record<string, string>) {
+ const config = await Config.create({
+ isRelease: true,
+ targetAllPlatforms: true,
+ });
+
+ getNodeShasums.mockReturnValue(
+ Object.fromEntries(
+ config.getTargetPlatforms().map((platform) => {
+ return [`${platform.getName()}:${platform.getNodeArch()}:downloadName`, 'valid shasum'];
+ })
+ )
+ );
+
+ getNodeDownloadInfo.mockImplementation((_: Config, platform: Platform) => {
+ return {
+ downloadPath: `${platform.getName()}:${platform.getNodeArch()}:downloadPath`,
+ downloadName: `${platform.getName()}:${platform.getNodeArch()}:downloadName`,
+ };
+ });
+
+ getFileHash.mockImplementation((downloadPath: string) => {
+ if (actualShaSums?.[downloadPath]) {
+ return actualShaSums[downloadPath];
+ }
+
+ return 'valid shasum';
+ });
+
+ return { config };
+}
+
+beforeEach(() => {
+ testWriter.messages.length = 0;
+ jest.clearAllMocks();
+});
+
+it('checks shasums for each downloaded node build', async () => {
+ const { config } = await setup();
+
+ await VerifyExistingNodeBuilds.run(config, log, []);
+
+ expect(getNodeShasums).toMatchInlineSnapshot(`
+ [MockFunction] {
+ "calls": Array [
+ Array [
+ "10.21.0",
+ ],
+ ],
+ "results": Array [
+ Object {
+ "type": "return",
+ "value": Object {
+ "darwin:darwin-x64:downloadName": "valid shasum",
+ "linux:linux-arm64:downloadName": "valid shasum",
+ "linux:linux-x64:downloadName": "valid shasum",
+ "win32:win32-x64:downloadName": "valid shasum",
+ },
+ },
+ ],
+ }
+ `);
+ expect(getNodeDownloadInfo).toMatchInlineSnapshot(`
+ [MockFunction] {
+ "calls": Array [
+ Array [
+          <Config>,
+ Platform {
+ "architecture": "x64",
+ "buildName": "linux-x86_64",
+ "name": "linux",
+ },
+ ],
+ Array [
+          <Config>,
+ Platform {
+ "architecture": "arm64",
+ "buildName": "linux-aarch64",
+ "name": "linux",
+ },
+ ],
+ Array [
+          <Config>,
+ Platform {
+ "architecture": "x64",
+ "buildName": "darwin-x86_64",
+ "name": "darwin",
+ },
+ ],
+ Array [
+          <Config>,
+ Platform {
+ "architecture": "x64",
+ "buildName": "windows-x86_64",
+ "name": "win32",
+ },
+ ],
+ ],
+ "results": Array [
+ Object {
+ "type": "return",
+ "value": Object {
+ "downloadName": "linux:linux-x64:downloadName",
+ "downloadPath": "linux:linux-x64:downloadPath",
+ },
+ },
+ Object {
+ "type": "return",
+ "value": Object {
+ "downloadName": "linux:linux-arm64:downloadName",
+ "downloadPath": "linux:linux-arm64:downloadPath",
+ },
+ },
+ Object {
+ "type": "return",
+ "value": Object {
+ "downloadName": "darwin:darwin-x64:downloadName",
+ "downloadPath": "darwin:darwin-x64:downloadPath",
+ },
+ },
+ Object {
+ "type": "return",
+ "value": Object {
+ "downloadName": "win32:win32-x64:downloadName",
+ "downloadPath": "win32:win32-x64:downloadPath",
+ },
+ },
+ ],
+ }
+ `);
+ expect(getFileHash).toMatchInlineSnapshot(`
+ [MockFunction] {
+ "calls": Array [
+ Array [
+ "linux:linux-x64:downloadPath",
+ "sha256",
+ ],
+ Array [
+ "linux:linux-arm64:downloadPath",
+ "sha256",
+ ],
+ Array [
+ "darwin:darwin-x64:downloadPath",
+ "sha256",
+ ],
+ Array [
+ "win32:win32-x64:downloadPath",
+ "sha256",
+ ],
+ ],
+ "results": Array [
+ Object {
+ "type": "return",
+ "value": "valid shasum",
+ },
+ Object {
+ "type": "return",
+ "value": "valid shasum",
+ },
+ Object {
+ "type": "return",
+ "value": "valid shasum",
+ },
+ Object {
+ "type": "return",
+ "value": "valid shasum",
+ },
+ ],
+ }
+ `);
+});
+
+it('rejects if any download has an incorrect sha256', async () => {
+ const { config } = await setup({
+ 'linux:linux-arm64:downloadPath': 'invalid shasum',
+ });
+
+ await expect(
+ VerifyExistingNodeBuilds.run(config, log, [])
+ ).rejects.toThrowErrorMatchingInlineSnapshot(
+ `"Download at linux:linux-arm64:downloadPath does not match expected checksum invalid shasum"`
+ );
+});
diff --git a/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.js b/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.ts
similarity index 93%
rename from src/dev/build/tasks/nodejs/verify_existing_node_builds_task.js
rename to src/dev/build/tasks/nodejs/verify_existing_node_builds_task.ts
index b320471fda33f..9ce0778d2d1f0 100644
--- a/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.js
+++ b/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.ts
@@ -17,11 +17,11 @@
* under the License.
*/
-import { getFileHash } from '../../lib';
+import { getFileHash, GlobalTask } from '../../lib';
import { getNodeDownloadInfo } from './node_download_info';
import { getNodeShasums } from './node_shasums';
-export const VerifyExistingNodeBuildsTask = {
+export const VerifyExistingNodeBuilds: GlobalTask = {
global: true,
description: 'Verifying previously downloaded node.js build for all platforms',
async run(config, log) {
diff --git a/src/dev/build/tasks/notice_file_task.js b/src/dev/build/tasks/notice_file_task.ts
similarity index 95%
rename from src/dev/build/tasks/notice_file_task.js
rename to src/dev/build/tasks/notice_file_task.ts
index 59369c7cb5a3b..6edb76d506bc0 100644
--- a/src/dev/build/tasks/notice_file_task.js
+++ b/src/dev/build/tasks/notice_file_task.ts
@@ -20,11 +20,11 @@
import { getInstalledPackages } from '../../npm';
import { LICENSE_OVERRIDES } from '../../license_checker';
-import { write } from '../lib';
+import { write, Task } from '../lib';
import { getNodeDownloadInfo } from './nodejs';
import { generateNoticeFromSource, generateBuildNoticeText } from '../../notice';
-export const CreateNoticeFileTask = {
+export const CreateNoticeFile: Task = {
description: 'Generating NOTICE.txt file',
async run(config, log, build) {
@@ -40,7 +40,7 @@ export const CreateNoticeFileTask = {
log.info('Discovering installed packages');
const packages = await getInstalledPackages({
directory: build.resolvePath(),
- dev: false,
+ includeDev: false,
licenseOverrides: LICENSE_OVERRIDES,
});
diff --git a/src/dev/build/tasks/optimize_task.js b/src/dev/build/tasks/optimize_task.ts
similarity index 95%
rename from src/dev/build/tasks/optimize_task.js
rename to src/dev/build/tasks/optimize_task.ts
index 16a7537b8ac9e..98979f376eacd 100644
--- a/src/dev/build/tasks/optimize_task.js
+++ b/src/dev/build/tasks/optimize_task.ts
@@ -17,10 +17,10 @@
* under the License.
*/
-import { deleteAll, copyAll, exec } from '../lib';
+import { deleteAll, copyAll, exec, Task } from '../lib';
import { getNodeDownloadInfo } from './nodejs';
-export const OptimizeBuildTask = {
+export const OptimizeBuild: Task = {
description: 'Running optimizer',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/os_packages/create_os_package_tasks.js b/src/dev/build/tasks/os_packages/create_os_package_tasks.ts
similarity index 89%
rename from src/dev/build/tasks/os_packages/create_os_package_tasks.js
rename to src/dev/build/tasks/os_packages/create_os_package_tasks.ts
index 6a00e681ab0ec..4580b95423d3d 100644
--- a/src/dev/build/tasks/os_packages/create_os_package_tasks.js
+++ b/src/dev/build/tasks/os_packages/create_os_package_tasks.ts
@@ -17,10 +17,11 @@
* under the License.
*/
+import { Task } from '../../lib';
import { runFpm } from './run_fpm';
import { runDockerGenerator, runDockerGeneratorForUBI } from './docker_generator';
-export const CreateDebPackageTask = {
+export const CreateDebPackage: Task = {
description: 'Creating deb package',
async run(config, log, build) {
@@ -33,7 +34,7 @@ export const CreateDebPackageTask = {
},
};
-export const CreateRpmPackageTask = {
+export const CreateRpmPackage: Task = {
description: 'Creating rpm package',
async run(config, log, build) {
@@ -41,7 +42,7 @@ export const CreateRpmPackageTask = {
},
};
-export const CreateDockerPackageTask = {
+export const CreateDockerPackage: Task = {
description: 'Creating docker package',
async run(config, log, build) {
@@ -50,7 +51,7 @@ export const CreateDockerPackageTask = {
},
};
-export const CreateDockerUbiPackageTask = {
+export const CreateDockerUbiPackage: Task = {
description: 'Creating docker ubi package',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.js b/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.js
index bbcb6dfeeb109..3f34a84057668 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.js
+++ b/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.js
@@ -18,7 +18,7 @@
*/
import { resolve } from 'path';
-import { compress, copyAll, mkdirp, write } from '../../../lib';
+import { compressTar, copyAll, mkdirp, write } from '../../../lib';
import { dockerfileTemplate } from './templates';
export async function bundleDockerFiles(config, log, build, scope) {
@@ -50,8 +50,7 @@ export async function bundleDockerFiles(config, log, build, scope) {
// Compress the dockerfiles dir created inside the docker build dir
// and output it as a target in the targets folder
- await compress(
- 'tar',
+ await compressTar(
{
archiverOptions: {
gzip: true,
diff --git a/src/dev/build/tasks/os_packages/index.js b/src/dev/build/tasks/os_packages/docker_generator/index.ts
similarity index 84%
rename from src/dev/build/tasks/os_packages/index.js
rename to src/dev/build/tasks/os_packages/docker_generator/index.ts
index 82626c47b6087..78d2b197dc7b2 100644
--- a/src/dev/build/tasks/os_packages/index.js
+++ b/src/dev/build/tasks/os_packages/docker_generator/index.ts
@@ -17,9 +17,5 @@
* under the License.
*/
-export {
- CreateRpmPackageTask,
- CreateDebPackageTask,
- CreateDockerPackageTask,
- CreateDockerUbiPackageTask,
-} from './create_os_package_tasks';
+// @ts-expect-error not ts yet
+export { runDockerGenerator, runDockerGeneratorForUBI } from './run';
diff --git a/src/dev/build/tasks/os_packages/index.ts b/src/dev/build/tasks/os_packages/index.ts
new file mode 100644
index 0000000000000..439fde71d255f
--- /dev/null
+++ b/src/dev/build/tasks/os_packages/index.ts
@@ -0,0 +1,20 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export * from './create_os_package_tasks';
diff --git a/src/dev/build/tasks/os_packages/run_fpm.js b/src/dev/build/tasks/os_packages/run_fpm.ts
similarity index 91%
rename from src/dev/build/tasks/os_packages/run_fpm.js
rename to src/dev/build/tasks/os_packages/run_fpm.ts
index eb77da0e70176..b5169ec3d43b6 100644
--- a/src/dev/build/tasks/os_packages/run_fpm.js
+++ b/src/dev/build/tasks/os_packages/run_fpm.ts
@@ -19,15 +19,23 @@
import { resolve } from 'path';
-import { exec } from '../../lib';
+import { ToolingLog } from '@kbn/dev-utils';
-export async function runFpm(config, log, build, type, pkgSpecificFlags) {
+import { exec, Config, Build } from '../../lib';
+
+export async function runFpm(
+ config: Config,
+ log: ToolingLog,
+ build: Build,
+ type: 'rpm' | 'deb',
+ pkgSpecificFlags: string[]
+) {
const linux = config.getPlatform('linux', 'x64');
const version = config.getBuildVersion();
- const resolveWithTrailingSlash = (...paths) => `${resolve(...paths)}/`;
+ const resolveWithTrailingSlash = (...paths: string[]) => `${resolve(...paths)}/`;
- const fromBuild = (...paths) => build.resolvePathForPlatform(linux, ...paths);
+ const fromBuild = (...paths: string[]) => build.resolvePathForPlatform(linux, ...paths);
const pickLicense = () => {
if (build.isOss()) {
diff --git a/src/dev/build/tasks/patch_native_modules_task.js b/src/dev/build/tasks/patch_native_modules_task.ts
similarity index 82%
rename from src/dev/build/tasks/patch_native_modules_task.js
rename to src/dev/build/tasks/patch_native_modules_task.ts
index c30d1fd774b55..b56d01b616462 100644
--- a/src/dev/build/tasks/patch_native_modules_task.js
+++ b/src/dev/build/tasks/patch_native_modules_task.ts
@@ -16,14 +16,30 @@
* specific language governing permissions and limitations
* under the License.
*/
-import fs from 'fs';
+
import path from 'path';
-import util from 'util';
-import { deleteAll, download, gunzip, untar } from '../lib';
+
+import { ToolingLog } from '@kbn/dev-utils';
+
+import { deleteAll, download, gunzip, untar, Task, Config, Build, Platform, read } from '../lib';
const DOWNLOAD_DIRECTORY = '.native_modules';
-const packages = [
+interface Package {
+ name: string;
+ version: string;
+ destinationPath: string;
+ extractMethod: string;
+ archives: Record<
+ string,
+ {
+ url: string;
+ sha256: string;
+ }
+ >;
+}
+
+const packages: Package[] = [
{
name: 're2',
version: '1.15.4',
@@ -46,16 +62,22 @@ const packages = [
},
];
-async function getInstalledVersion(config, packageName) {
+async function getInstalledVersion(config: Config, packageName: string) {
const packageJSONPath = config.resolveFromRepo(
path.join('node_modules', packageName, 'package.json')
);
- const buffer = await util.promisify(fs.readFile)(packageJSONPath);
- const packageJSON = JSON.parse(buffer);
+ const json = await read(packageJSONPath);
+ const packageJSON = JSON.parse(json);
return packageJSON.version;
}
-async function patchModule(config, log, build, platform, pkg) {
+async function patchModule(
+ config: Config,
+ log: ToolingLog,
+ build: Build,
+ platform: Platform,
+ pkg: Package
+) {
const installedVersion = await getInstalledVersion(config, pkg.name);
if (installedVersion !== pkg.version) {
throw new Error(
@@ -89,7 +111,7 @@ async function patchModule(config, log, build, platform, pkg) {
}
}
-export const PatchNativeModulesTask = {
+export const PatchNativeModules: Task = {
description: 'Patching platform-specific native modules',
async run(config, log, build) {
for (const pkg of packages) {
diff --git a/src/dev/build/tasks/path_length_task.js b/src/dev/build/tasks/path_length_task.ts
similarity index 95%
rename from src/dev/build/tasks/path_length_task.js
rename to src/dev/build/tasks/path_length_task.ts
index 29ab9ce5a2499..d639217adc53b 100644
--- a/src/dev/build/tasks/path_length_task.js
+++ b/src/dev/build/tasks/path_length_task.ts
@@ -21,9 +21,9 @@ import { relative } from 'path';
import { tap, filter, map, toArray } from 'rxjs/operators';
-import { scan$ } from '../lib/scan';
+import { scan$, Task } from '../lib';
-export const PathLengthTask = {
+export const PathLength: Task = {
description: 'Checking Windows for paths > 200 characters',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/transpile_babel_task.js b/src/dev/build/tasks/transpile_babel_task.ts
similarity index 80%
rename from src/dev/build/tasks/transpile_babel_task.js
rename to src/dev/build/tasks/transpile_babel_task.ts
index f476ead9183fe..a1e994587ce92 100644
--- a/src/dev/build/tasks/transpile_babel_task.js
+++ b/src/dev/build/tasks/transpile_babel_task.ts
@@ -17,15 +17,21 @@
* under the License.
*/
+import { pipeline } from 'stream';
+import { promisify } from 'util';
+
+// @ts-expect-error @types/gulp-babel is outdated and doesn't work for gulp-babel v8
import gulpBabel from 'gulp-babel';
import vfs from 'vinyl-fs';
-import { createPromiseFromStreams } from '../../../legacy/utils';
+import { Task, Build } from '../lib';
+
+const asyncPipeline = promisify(pipeline);
-const transpileWithBabel = async (srcGlobs, build, presets) => {
+const transpileWithBabel = async (srcGlobs: string[], build: Build, presets: string[]) => {
const buildRoot = build.resolvePath();
- await createPromiseFromStreams([
+ await asyncPipeline(
vfs.src(
srcGlobs.concat([
'!**/*.d.ts',
@@ -44,11 +50,11 @@ const transpileWithBabel = async (srcGlobs, build, presets) => {
presets,
}),
- vfs.dest(buildRoot),
- ]);
+ vfs.dest(buildRoot)
+ );
};
-export const TranspileBabelTask = {
+export const TranspileBabel: Task = {
description: 'Transpiling sources with babel',
async run(config, log, build) {
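The change above swaps the legacy createPromiseFromStreams helper for Node's stream.pipeline wrapped in promisify. A small sketch of that pattern; the globs and destination are placeholders.

import { pipeline } from 'stream';
import { promisify } from 'util';
import vfs from 'vinyl-fs';

const asyncPipeline = promisify(pipeline);

// pipeline() wires the source and destination streams together and rejects
// the returned promise if any stream in the chain errors.
async function copyFiltered(srcGlobs: string[], destDir: string) {
  await asyncPipeline(vfs.src(srcGlobs), vfs.dest(destDir));
}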
diff --git a/src/dev/build/tasks/transpile_scss_task.js b/src/dev/build/tasks/transpile_scss_task.ts
similarity index 89%
rename from src/dev/build/tasks/transpile_scss_task.js
rename to src/dev/build/tasks/transpile_scss_task.ts
index d1c76d97c8853..e1b0bd0171c92 100644
--- a/src/dev/build/tasks/transpile_scss_task.js
+++ b/src/dev/build/tasks/transpile_scss_task.ts
@@ -17,9 +17,12 @@
* under the License.
*/
+import { Task } from '../lib';
+
+// @ts-expect-error buildSass isn't TS yet
import { buildSass } from '../../sass';
-export const TranspileScssTask = {
+export const TranspileScss: Task = {
description: 'Transpiling SCSS to CSS',
async run(config, log, build) {
await buildSass({
diff --git a/src/dev/build/tasks/uuid_verification_task.js b/src/dev/build/tasks/uuid_verification_task.ts
similarity index 94%
rename from src/dev/build/tasks/uuid_verification_task.js
rename to src/dev/build/tasks/uuid_verification_task.ts
index 32c9e73dba988..b65096690b681 100644
--- a/src/dev/build/tasks/uuid_verification_task.js
+++ b/src/dev/build/tasks/uuid_verification_task.ts
@@ -17,9 +17,9 @@
* under the License.
*/
-import { read } from '../lib';
+import { read, Task } from '../lib';
-export const UuidVerificationTask = {
+export const UuidVerification: Task = {
description: 'Verify that no UUID file is baked into the build',
async run(config, log, build) {
diff --git a/src/dev/build/tasks/verify_env_task.js b/src/dev/build/tasks/verify_env_task.ts
similarity index 93%
rename from src/dev/build/tasks/verify_env_task.js
rename to src/dev/build/tasks/verify_env_task.ts
index eb679411d7e38..975a620c1c540 100644
--- a/src/dev/build/tasks/verify_env_task.js
+++ b/src/dev/build/tasks/verify_env_task.ts
@@ -17,7 +17,9 @@
* under the License.
*/
-export const VerifyEnvTask = {
+import { GlobalTask } from '../lib';
+
+export const VerifyEnv: GlobalTask = {
global: true,
description: 'Verifying environment meets requirements',
diff --git a/src/dev/build/tasks/write_sha_sums_task.js b/src/dev/build/tasks/write_sha_sums_task.ts
similarity index 92%
rename from src/dev/build/tasks/write_sha_sums_task.js
rename to src/dev/build/tasks/write_sha_sums_task.ts
index c44924bb9ce09..abf938cd150ab 100644
--- a/src/dev/build/tasks/write_sha_sums_task.js
+++ b/src/dev/build/tasks/write_sha_sums_task.ts
@@ -19,9 +19,9 @@
import globby from 'globby';
-import { getFileHash, write } from '../lib';
+import { getFileHash, write, GlobalTask } from '../lib';
-export const WriteShaSumsTask = {
+export const WriteShaSums: GlobalTask = {
global: true,
description: 'Writing sha1sums of archives and packages in target directory',
diff --git a/src/legacy/utils/__tests__/watch_stdio_for_line.js b/src/legacy/utils/__tests__/watch_stdio_for_line.js
deleted file mode 100644
index 32d61658c1114..0000000000000
--- a/src/legacy/utils/__tests__/watch_stdio_for_line.js
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import execa from 'execa';
-import stripAnsi from 'strip-ansi';
-import sinon from 'sinon';
-
-import { watchStdioForLine } from '../watch_stdio_for_line';
-
-describe('src/legacy/utils/watch_stdio_for_line', function () {
- const sandbox = sinon.sandbox.create();
- afterEach(() => sandbox.reset());
-
- const onLogLine = sandbox.stub();
- const logFn = (line) => onLogLine(stripAnsi(line));
-
- it('calls logFn with log lines', async () => {
- const proc = execa(process.execPath, ['-e', 'console.log("hi")']);
-
- await watchStdioForLine(proc, logFn);
-
- // log output of the process
- sinon.assert.calledWithExactly(onLogLine, sinon.match(/hi/));
- });
-
- it('send the proc SIGKILL if it logs a line matching exitAfter regexp', async function () {
- // fixture proc will exit after 10 seconds if sigint not received, but the test won't fail
- // unless we see the log line `SIGINT not received`, so we let the test take up to 30 seconds
- // for potentially huge delays here and there
- this.timeout(30000);
-
- const proc = execa(process.execPath, [require.resolve('./fixtures/log_on_sigint')]);
-
- await watchStdioForLine(proc, logFn, /listening for SIGINT/);
-
- sinon.assert.calledWithExactly(onLogLine, sinon.match(/listening for SIGINT/));
- sinon.assert.neverCalledWith(onLogLine, sinon.match(/SIGINT not received/));
- });
-});
diff --git a/src/legacy/utils/index.js b/src/legacy/utils/index.js
index a4c0cdf958fc2..4274fb2e4901a 100644
--- a/src/legacy/utils/index.js
+++ b/src/legacy/utils/index.js
@@ -21,7 +21,6 @@ export { BinderBase } from './binder';
export { BinderFor } from './binder_for';
export { deepCloneWithBuffers } from './deep_clone_with_buffers';
export { unset } from './unset';
-export { watchStdioForLine } from './watch_stdio_for_line';
export { IS_KIBANA_DISTRIBUTABLE } from './artifact_type';
export { IS_KIBANA_RELEASE } from './artifact_type';
diff --git a/src/legacy/utils/streams/index.d.ts b/src/legacy/utils/streams/index.d.ts
index 5ef39b292c685..470b5d9fa3505 100644
--- a/src/legacy/utils/streams/index.d.ts
+++ b/src/legacy/utils/streams/index.d.ts
@@ -17,7 +17,7 @@
* under the License.
*/
-import { Readable, Transform, Writable, TransformOptions } from 'stream';
+import { Readable, Writable, Transform, TransformOptions } from 'stream';
export function concatStreamProviders(
sourceProviders: Array<() => Readable>,
diff --git a/x-pack/package.json b/x-pack/package.json
index 1de009ae1232f..39bdb76ac7a73 100644
--- a/x-pack/package.json
+++ b/x-pack/package.json
@@ -49,7 +49,7 @@
"@testing-library/react-hooks": "^3.2.1",
"@testing-library/jest-dom": "^5.8.0",
"@types/angular": "^1.6.56",
- "@types/archiver": "^3.0.0",
+ "@types/archiver": "^3.1.0",
"@types/base64-js": "^1.2.5",
"@types/boom": "^7.2.0",
"@types/cheerio": "^0.22.10",
diff --git a/yarn.lock b/yarn.lock
index 4cc802e328ab8..1bb8fab0372ae 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -4625,10 +4625,10 @@
resolved "https://registry.yarnpkg.com/@types/anymatch/-/anymatch-1.3.1.tgz#336badc1beecb9dacc38bea2cf32adf627a8421a"
integrity sha512-/+CRPXpBDpo2RK9C68N3b2cOvO0Cf5B9aPijHsoDQTHivnGSObdOF2BRQOYjojWTDy6nQvMjmqRXIxH55VjxxA==
-"@types/archiver@^3.0.0":
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/@types/archiver/-/archiver-3.0.0.tgz#c0a53e0ed3b7aef626ce683d081d7821d8c638b4"
- integrity sha512-orghAMOF+//wSg4ru2znk6jt0eIPvKTtMVLH7XcYcjbcRyAXRClDlh27QVdqnAvVM37yu9xDP6Nh7egRhNr8tQ==
+"@types/archiver@^3.1.0":
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/@types/archiver/-/archiver-3.1.0.tgz#0d5bd922ba5cf06e137cd6793db7942439b1805e"
+ integrity sha512-nTvHwgWONL+iXG+9CX+gnQ/tTOV+qucAjwpXqeUn4OCRMxP42T29FFP/7XaOo0EqqO3TlENhObeZEe7RUJAriw==
dependencies:
"@types/glob" "*"
From 085631b93ad034316b35e16f8cd880ffac0340bf Mon Sep 17 00:00:00 2001
From: Brent Kimmel
Date: Thu, 23 Jul 2020 14:04:52 -0400
Subject: [PATCH 27/59] Resolver node cube click should == button click
(#73085)
* Resolver node cube click == button click
---
.../public/resolver/view/process_event_dot.tsx | 10 ++++++++++
1 file changed, 10 insertions(+)
diff --git a/x-pack/plugins/security_solution/public/resolver/view/process_event_dot.tsx b/x-pack/plugins/security_solution/public/resolver/view/process_event_dot.tsx
index 05f2e0cbfcfa9..aed292e4a39d1 100644
--- a/x-pack/plugins/security_solution/public/resolver/view/process_event_dot.tsx
+++ b/x-pack/plugins/security_solution/public/resolver/view/process_event_dot.tsx
@@ -313,6 +313,14 @@ const UnstyledProcessEventDot = React.memo(
+
+ {rightContent && {rightContent}}
);
};
diff --git a/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts b/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts
index 8d470f6454b0e..2e7c91a26e1fc 100644
--- a/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts
+++ b/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts
@@ -94,7 +94,7 @@ export function useMultiContent({
const activeContentData: Partial = {};
for (const [id, _content] of Object.entries(contents.current)) {
- if (validation.contents[id as keyof T]) {
+ if (validation.contents[id as keyof T] !== false) {
const contentData = (_content as Content).getData();
// Replace the getData() handler with the cached value
@@ -161,7 +161,7 @@ export function useMultiContent({
);
/**
- * Validate the multi-content active content(s) in the DOM
+ * Validate the content(s) currently in the DOM
*/
const validate = useCallback(async () => {
if (Object.keys(contents.current).length === 0) {
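The `!== false` comparison above changes which contents contribute data: a content whose validation state is still `undefined` (not yet validated) is now included, and only an explicit `false` excludes it. The same predicate in isolation, with illustrative names:

type ContentValidation = boolean | undefined;

// undefined means "not validated yet" and is treated as valid;
// only an explicit false drops the content's data from the result.
const shouldIncludeContent = (state: ContentValidation): boolean => state !== false;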
diff --git a/src/plugins/es_ui_shared/public/global_flyout/index.ts b/src/plugins/es_ui_shared/public/global_flyout/index.ts
new file mode 100644
index 0000000000000..e876594337c1e
--- /dev/null
+++ b/src/plugins/es_ui_shared/public/global_flyout/index.ts
@@ -0,0 +1,23 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export {
+ GlobalFlyoutProvider,
+ useGlobalFlyout,
+} from '../../__packages_do_not_import__/global_flyout';
diff --git a/src/plugins/es_ui_shared/public/index.ts b/src/plugins/es_ui_shared/public/index.ts
index 98a305fe68f08..bdea5ccf5fe26 100644
--- a/src/plugins/es_ui_shared/public/index.ts
+++ b/src/plugins/es_ui_shared/public/index.ts
@@ -24,6 +24,7 @@
import * as Forms from './forms';
import * as Monaco from './monaco';
import * as ace from './ace';
+import * as GlobalFlyout from './global_flyout';
export { JsonEditor, OnJsonEditorUpdateHandler } from './components/json_editor';
@@ -65,7 +66,7 @@ export {
useAuthorizationContext,
} from './authorization';
-export { Monaco, Forms, ace };
+export { Monaco, Forms, ace, GlobalFlyout };
export { extractQueryParams } from './url';
diff --git a/src/plugins/es_ui_shared/static/forms/helpers/serializers.ts b/src/plugins/es_ui_shared/static/forms/helpers/serializers.ts
index 98287f6bac35d..733a60f1f86ff 100644
--- a/src/plugins/es_ui_shared/static/forms/helpers/serializers.ts
+++ b/src/plugins/es_ui_shared/static/forms/helpers/serializers.ts
@@ -64,9 +64,13 @@ interface StripEmptyFieldsOptions {
* @param options An optional configuration object. By default recursive it turned on.
*/
export const stripEmptyFields = (
- object: { [key: string]: any },
+ object?: { [key: string]: any },
options?: StripEmptyFieldsOptions
): { [key: string]: any } => {
+ if (object === undefined) {
+ return {};
+ }
+
const { types = ['string', 'object'], recursive = false } = options || {};
return Object.entries(object).reduce((acc, [key, value]) => {
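With the new optional parameter and early return, stripEmptyFields can now be called without an argument. Its expected behaviour, sketched with illustrative inputs:

// stripEmptyFields(undefined) hits the new early return and yields {}.
stripEmptyFields(); // => {}

// Existing behaviour is unchanged: empty strings (and empty objects) are dropped by default.
stripEmptyFields({ name: 'template', description: '' }); // => { name: 'template' }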
diff --git a/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts b/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts
index 907c749f8ec0b..12cf7ccac6c59 100644
--- a/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts
+++ b/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts
@@ -92,6 +92,17 @@ const registerHttpRequestMockHelpers = (server: SinonFakeServer) => {
]);
};
+ const setSimulateTemplateResponse = (response?: HttpResponse, error?: any) => {
+ const status = error ? error.status || 400 : 200;
+ const body = error ? JSON.stringify(error.body) : JSON.stringify(response);
+
+ server.respondWith('POST', `${API_BASE_PATH}/index_templates/simulate`, [
+ status,
+ { 'Content-Type': 'application/json' },
+ body,
+ ]);
+ };
+
return {
setLoadTemplatesResponse,
setLoadIndicesResponse,
@@ -102,6 +113,7 @@ const registerHttpRequestMockHelpers = (server: SinonFakeServer) => {
setLoadTemplateResponse,
setCreateTemplateResponse,
setUpdateTemplateResponse,
+ setSimulateTemplateResponse,
};
};
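The new setSimulateTemplateResponse helper follows the same fake-server pattern as the existing mocks. Typical usage in a test, mirroring the call made later in this patch; the error shape below is illustrative.

// Successful simulate call:
httpRequestsMockHelpers.setSimulateTemplateResponse({ simulateTemplate: 'response' });

// Simulated failure; the helper derives the status code and body from the error object:
httpRequestsMockHelpers.setSimulateTemplateResponse(undefined, {
  status: 400,
  body: { message: 'Invalid index template' },
});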
diff --git a/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx b/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx
index ad445f75f047c..e40cdc026210d 100644
--- a/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx
+++ b/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx
@@ -14,6 +14,8 @@ import {
notificationServiceMock,
docLinksServiceMock,
} from '../../../../../../src/core/public/mocks';
+import { GlobalFlyout } from '../../../../../../src/plugins/es_ui_shared/public';
+
import { AppContextProvider } from '../../../public/application/app_context';
import { httpService } from '../../../public/application/services/http';
import { breadcrumbService } from '../../../public/application/services/breadcrumbs';
@@ -23,9 +25,11 @@ import { ExtensionsService } from '../../../public/services';
import { UiMetricService } from '../../../public/application/services/ui_metric';
import { setUiMetricService } from '../../../public/application/services/api';
import { setExtensionsService } from '../../../public/application/store/selectors';
+import { MappingsEditorProvider } from '../../../public/application/components';
import { init as initHttpRequests } from './http_requests';
const mockHttpClient = axios.create({ adapter: axiosXhrAdapter });
+const { GlobalFlyoutProvider } = GlobalFlyout;
export const services = {
extensionsService: new ExtensionsService(),
@@ -62,7 +66,11 @@ export const WithAppDependencies = (Comp: any, overridingDependencies: any = {})
const mergedDependencies = merge({}, appDependencies, overridingDependencies);
return (
-
+
+
+
+
+
);
};
diff --git a/x-pack/plugins/index_management/__jest__/client_integration/helpers/test_subjects.ts b/x-pack/plugins/index_management/__jest__/client_integration/helpers/test_subjects.ts
index 9889ebe16ba1e..ecedf819e6185 100644
--- a/x-pack/plugins/index_management/__jest__/client_integration/helpers/test_subjects.ts
+++ b/x-pack/plugins/index_management/__jest__/client_integration/helpers/test_subjects.ts
@@ -28,6 +28,7 @@ export type TestSubjects =
| 'legacyTemplateTable'
| 'manageTemplateButton'
| 'mappingsTabContent'
+ | 'previewTabContent'
| 'noAliasesCallout'
| 'noMappingsCallout'
| 'noSettingsCallout'
@@ -48,4 +49,5 @@ export type TestSubjects =
| 'templateList'
| 'templatesTab'
| 'templateTable'
- | 'viewButton';
+ | 'viewButton'
+ | 'simulateTemplatePreview';
diff --git a/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.helpers.ts b/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.helpers.ts
index a397419053351..23b40f4cbd3d7 100644
--- a/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.helpers.ts
+++ b/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.helpers.ts
@@ -40,10 +40,15 @@ const createActions = (testBed: TestBed) => {
/**
* User Actions
*/
- const selectDetailsTab = (tab: 'summary' | 'settings' | 'mappings' | 'aliases') => {
- const tabs = ['summary', 'settings', 'mappings', 'aliases'];
+ const selectDetailsTab = async (
+ tab: 'summary' | 'settings' | 'mappings' | 'aliases' | 'preview'
+ ) => {
+ const tabs = ['summary', 'settings', 'mappings', 'aliases', 'preview'];
- testBed.find('templateDetails.tab').at(tabs.indexOf(tab)).simulate('click');
+ await act(async () => {
+ testBed.find('templateDetails.tab').at(tabs.indexOf(tab)).simulate('click');
+ });
+ testBed.component.update();
};
const clickReloadButton = () => {
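selectDetailsTab becomes asynchronous above because clicking a tab can now trigger the simulate request; wrapping the click in act() lets those effects settle before component.update() re-renders for the assertions. The same pattern in isolation (a sketch; TestBed comes from the repo's test_utils):

import { act } from 'react-dom/test-utils';

// Sketch only: click a tab inside act() so pending async effects flush,
// then force a re-render before the test asserts on the DOM.
const clickTabAt = async (testBed: TestBed, index: number) => {
  await act(async () => {
    testBed.find('templateDetails.tab').at(index).simulate('click');
  });
  testBed.component.update();
};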
diff --git a/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.test.ts b/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.test.ts
index f7ebc0bcf632b..06f57896d4900 100644
--- a/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.test.ts
+++ b/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.test.ts
@@ -493,7 +493,7 @@ describe('Index Templates tab', () => {
});
describe('tabs', () => {
- test('should have 4 tabs', async () => {
+ test('should have 5 tabs', async () => {
const template = fixtures.getTemplate({
name: `a${getRandomString()}`,
indexPatterns: ['template1Pattern1*', 'template1Pattern2'],
@@ -524,35 +524,48 @@ describe('Index Templates tab', () => {
const { find, actions, exists } = testBed;
httpRequestsMockHelpers.setLoadTemplateResponse(template);
+ httpRequestsMockHelpers.setSimulateTemplateResponse({ simulateTemplate: 'response' });
await actions.clickTemplateAt(0);
- expect(find('templateDetails.tab').length).toBe(4);
+ expect(find('templateDetails.tab').length).toBe(5);
expect(find('templateDetails.tab').map((t) => t.text())).toEqual([
'Summary',
'Settings',
'Mappings',
'Aliases',
+ 'Preview',
]);
// Summary tab should be initial active tab
expect(exists('summaryTab')).toBe(true);
// Navigate and verify all tabs
- actions.selectDetailsTab('settings');
+ await actions.selectDetailsTab('settings');
expect(exists('summaryTab')).toBe(false);
expect(exists('settingsTabContent')).toBe(true);
- actions.selectDetailsTab('aliases');
+ await actions.selectDetailsTab('aliases');
expect(exists('summaryTab')).toBe(false);
expect(exists('settingsTabContent')).toBe(false);
expect(exists('aliasesTabContent')).toBe(true);
- actions.selectDetailsTab('mappings');
+ await actions.selectDetailsTab('mappings');
expect(exists('summaryTab')).toBe(false);
expect(exists('settingsTabContent')).toBe(false);
expect(exists('aliasesTabContent')).toBe(false);
expect(exists('mappingsTabContent')).toBe(true);
+
+ await actions.selectDetailsTab('preview');
+ expect(exists('summaryTab')).toBe(false);
+ expect(exists('settingsTabContent')).toBe(false);
+ expect(exists('aliasesTabContent')).toBe(false);
+ expect(exists('mappingsTabContent')).toBe(false);
+ expect(exists('previewTabContent')).toBe(true);
+
+ expect(find('simulateTemplatePreview').text().replace(/\s/g, '')).toEqual(
+ JSON.stringify({ simulateTemplate: 'response' })
+ );
});
test('should show an info callout if data is not present', async () => {
@@ -568,17 +581,17 @@ describe('Index Templates tab', () => {
await actions.clickTemplateAt(0);
- expect(find('templateDetails.tab').length).toBe(4);
+ expect(find('templateDetails.tab').length).toBe(5);
expect(exists('summaryTab')).toBe(true);
// Navigate and verify callout message per tab
- actions.selectDetailsTab('settings');
+ await actions.selectDetailsTab('settings');
expect(exists('noSettingsCallout')).toBe(true);
- actions.selectDetailsTab('mappings');
+ await actions.selectDetailsTab('mappings');
expect(exists('noMappingsCallout')).toBe(true);
- actions.selectDetailsTab('aliases');
+ await actions.selectDetailsTab('aliases');
expect(exists('noAliasesCallout')).toBe(true);
});
});
diff --git a/x-pack/plugins/index_management/common/constants/index.ts b/x-pack/plugins/index_management/common/constants/index.ts
index d1700f0e611c0..11240271503e2 100644
--- a/x-pack/plugins/index_management/common/constants/index.ts
+++ b/x-pack/plugins/index_management/common/constants/index.ts
@@ -47,7 +47,9 @@ export {
UIM_TEMPLATE_DETAIL_PANEL_SETTINGS_TAB,
UIM_TEMPLATE_DETAIL_PANEL_MAPPINGS_TAB,
UIM_TEMPLATE_DETAIL_PANEL_ALIASES_TAB,
+ UIM_TEMPLATE_DETAIL_PANEL_PREVIEW_TAB,
UIM_TEMPLATE_CREATE,
UIM_TEMPLATE_UPDATE,
UIM_TEMPLATE_CLONE,
+ UIM_TEMPLATE_SIMULATE,
} from './ui_metric';
diff --git a/x-pack/plugins/index_management/common/constants/ui_metric.ts b/x-pack/plugins/index_management/common/constants/ui_metric.ts
index 5fda812c704d1..545555b92f352 100644
--- a/x-pack/plugins/index_management/common/constants/ui_metric.ts
+++ b/x-pack/plugins/index_management/common/constants/ui_metric.ts
@@ -41,6 +41,8 @@ export const UIM_TEMPLATE_DETAIL_PANEL_SUMMARY_TAB = 'template_details_summary_t
export const UIM_TEMPLATE_DETAIL_PANEL_SETTINGS_TAB = 'template_details_settings_tab';
export const UIM_TEMPLATE_DETAIL_PANEL_MAPPINGS_TAB = 'template_details_mappings_tab';
export const UIM_TEMPLATE_DETAIL_PANEL_ALIASES_TAB = 'template_details_aliases_tab';
+export const UIM_TEMPLATE_DETAIL_PANEL_PREVIEW_TAB = 'template_details_preview_tab';
export const UIM_TEMPLATE_CREATE = 'template_create';
export const UIM_TEMPLATE_UPDATE = 'template_update';
export const UIM_TEMPLATE_CLONE = 'template_clone';
+export const UIM_TEMPLATE_SIMULATE = 'template_simulate';
diff --git a/x-pack/plugins/index_management/common/lib/template_serialization.ts b/x-pack/plugins/index_management/common/lib/template_serialization.ts
index 069d6ac29fbca..1803d89a40016 100644
--- a/x-pack/plugins/index_management/common/lib/template_serialization.ts
+++ b/x-pack/plugins/index_management/common/lib/template_serialization.ts
@@ -109,7 +109,7 @@ export function serializeLegacyTemplate(template: TemplateDeserialized): LegacyT
version,
order,
indexPatterns,
- template: { settings, aliases, mappings },
+ template: { settings, aliases, mappings } = {},
} = template;
return {
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts
index 3d496d68cc66e..a112d73230b82 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts
@@ -61,11 +61,10 @@ describe('', () => {
const { exists, find, actions, component } = testBed;
// Verify flyout exists with correct title
- expect(exists('componentTemplateDetails')).toBe(true);
- expect(find('componentTemplateDetails.title').text()).toBe(COMPONENT_TEMPLATE.name);
+ expect(find('title').text()).toBe(COMPONENT_TEMPLATE.name);
// Verify footer does not display since "actions" prop was not provided
- expect(exists('componentTemplateDetails.footer')).toBe(false);
+ expect(exists('footer')).toBe(false);
// Verify tabs exist
expect(exists('settingsTab')).toBe(true);
@@ -185,7 +184,7 @@ describe('', () => {
const { exists, actions, component, find } = testBed;
// Verify footer exists
- expect(exists('componentTemplateDetails.footer')).toBe(true);
+ expect(exists('footer')).toBe(true);
expect(exists('manageComponentTemplateButton')).toBe(true);
// Click manage button and verify actions
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/component_template_details.helpers.ts b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/component_template_details.helpers.ts
index 25c2d654fd900..fe81e8dcfe123 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/component_template_details.helpers.ts
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/component_template_details.helpers.ts
@@ -6,7 +6,7 @@
import { registerTestBed, TestBed } from '../../../../../../../../../test_utils';
import { WithAppDependencies } from './setup_environment';
-import { ComponentTemplateDetailsFlyout } from '../../../component_template_details';
+import { ComponentTemplateDetailsFlyoutContent } from '../../../component_template_details';
export type ComponentTemplateDetailsTestBed = TestBed & {
actions: ReturnType;
@@ -44,7 +44,7 @@ const createActions = (testBed: TestBed) =
export const setup = (props: any): ComponentTemplateDetailsTestBed => {
const setupTestBed = registerTestBed(
- WithAppDependencies(ComponentTemplateDetailsFlyout),
+ WithAppDependencies(ComponentTemplateDetailsFlyoutContent),
{
memoryRouter: {
wrapComponent: false,
@@ -65,6 +65,8 @@ export type ComponentTemplateDetailsTestSubjects =
| 'componentTemplateDetails'
| 'componentTemplateDetails.title'
| 'componentTemplateDetails.footer'
+ | 'title'
+ | 'footer'
| 'summaryTab'
| 'mappingsTab'
| 'settingsTab'
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx
index 7e460d3855cb0..2f7317e3e656b 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx
@@ -15,12 +15,15 @@ import {
applicationServiceMock,
} from '../../../../../../../../../../src/core/public/mocks';
+import { GlobalFlyout } from '../../../../../../../../../../src/plugins/es_ui_shared/public';
+import { MappingsEditorProvider } from '../../../../mappings_editor';
import { ComponentTemplatesProvider } from '../../../component_templates_context';
import { init as initHttpRequests } from './http_requests';
import { API_BASE_PATH } from './constants';
const mockHttpClient = axios.create({ adapter: axiosXhrAdapter });
+const { GlobalFlyoutProvider } = GlobalFlyout;
const appDependencies = {
httpClient: (mockHttpClient as unknown) as HttpSetup,
@@ -42,7 +45,11 @@ export const setupEnvironment = () => {
};
export const WithAppDependencies = (Comp: any) => (props: any) => (
-
-
-
+
+
+
+
+
+
+
);
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx
index 60f1fff3cc9de..0f5bc64c358b9 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx
@@ -8,7 +8,6 @@ import React, { useState } from 'react';
import { FormattedMessage } from '@kbn/i18n/react';
import {
- EuiFlyout,
EuiFlyoutHeader,
EuiTitle,
EuiFlyoutBody,
@@ -28,14 +27,19 @@ import { ComponentTemplateTabs, TabType } from './tabs';
import { ManageButton, ManageAction } from './manage_button';
import { attemptToDecodeURI } from '../lib';
-interface Props {
+export interface Props {
componentTemplateName: string;
onClose: () => void;
actions?: ManageAction[];
showSummaryCallToAction?: boolean;
}
-export const ComponentTemplateDetailsFlyout: React.FunctionComponent = ({
+export const defaultFlyoutProps = {
+ 'data-test-subj': 'componentTemplateDetails',
+ 'aria-labelledby': 'componentTemplateDetailsFlyoutTitle',
+};
+
+export const ComponentTemplateDetailsFlyoutContent: React.FunctionComponent = ({
componentTemplateName,
onClose,
actions,
@@ -109,13 +113,7 @@ export const ComponentTemplateDetailsFlyout: React.FunctionComponent = ({
}
return (
-
+ <>
@@ -172,6 +170,6 @@ export const ComponentTemplateDetailsFlyout: React.FunctionComponent = ({
)}
-
+    </>
);
};
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/index.ts b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/index.ts
index 11aac200a2f14..8687a1f5b89c0 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/index.ts
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/index.ts
@@ -4,4 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
-export { ComponentTemplateDetailsFlyout } from './component_template_details';
+export {
+ ComponentTemplateDetailsFlyoutContent,
+ defaultFlyoutProps,
+ Props as ComponentTemplateDetailsProps,
+} from './component_template_details';
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx
index efc8b649ef872..8ba7409a9ac57 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx
@@ -4,18 +4,22 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import React, { useState, useEffect } from 'react';
+import React, { useState, useEffect, useCallback } from 'react';
import { RouteComponentProps } from 'react-router-dom';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import { ScopedHistory } from 'kibana/public';
import { EuiLink, EuiText, EuiSpacer } from '@elastic/eui';
-import { SectionLoading, ComponentTemplateDeserialized } from '../shared_imports';
+import { SectionLoading, ComponentTemplateDeserialized, GlobalFlyout } from '../shared_imports';
import { UIM_COMPONENT_TEMPLATE_LIST_LOAD } from '../constants';
import { attemptToDecodeURI } from '../lib';
import { useComponentTemplatesContext } from '../component_templates_context';
-import { ComponentTemplateDetailsFlyout } from '../component_template_details';
+import {
+ ComponentTemplateDetailsFlyoutContent,
+ defaultFlyoutProps,
+ ComponentTemplateDetailsProps,
+} from '../component_template_details';
import { EmptyPrompt } from './empty_prompt';
import { ComponentTable } from './table';
import { LoadError } from './error';
@@ -26,39 +30,112 @@ interface Props {
history: RouteComponentProps['history'];
}
+const { useGlobalFlyout } = GlobalFlyout;
+
export const ComponentTemplateList: React.FunctionComponent = ({
componentTemplateName,
history,
}) => {
+ const {
+ addContent: addContentToGlobalFlyout,
+ removeContent: removeContentFromGlobalFlyout,
+ } = useGlobalFlyout();
const { api, trackMetric, documentation } = useComponentTemplatesContext();
const { data, isLoading, error, sendRequest } = api.useLoadComponentTemplates();
const [componentTemplatesToDelete, setComponentTemplatesToDelete] = useState([]);
- const goToComponentTemplateList = () => {
+ const goToComponentTemplateList = useCallback(() => {
return history.push({
pathname: 'component_templates',
});
- };
-
- const goToEditComponentTemplate = (name: string) => {
- return history.push({
- pathname: encodeURI(`edit_component_template/${encodeURIComponent(name)}`),
- });
- };
+ }, [history]);
+
+ const goToEditComponentTemplate = useCallback(
+ (name: string) => {
+ return history.push({
+ pathname: encodeURI(`edit_component_template/${encodeURIComponent(name)}`),
+ });
+ },
+ [history]
+ );
- const goToCloneComponentTemplate = (name: string) => {
- return history.push({
- pathname: encodeURI(`create_component_template/${encodeURIComponent(name)}`),
- });
- };
+ const goToCloneComponentTemplate = useCallback(
+ (name: string) => {
+ return history.push({
+ pathname: encodeURI(`create_component_template/${encodeURIComponent(name)}`),
+ });
+ },
+ [history]
+ );
// Track component loaded
useEffect(() => {
trackMetric('loaded', UIM_COMPONENT_TEMPLATE_LIST_LOAD);
}, [trackMetric]);
+ useEffect(() => {
+ if (componentTemplateName) {
+ const actions = [
+ {
+ name: i18n.translate('xpack.idxMgmt.componentTemplateDetails.editButtonLabel', {
+ defaultMessage: 'Edit',
+ }),
+ icon: 'pencil',
+ handleActionClick: () =>
+ goToEditComponentTemplate(attemptToDecodeURI(componentTemplateName)),
+ },
+ {
+ name: i18n.translate('xpack.idxMgmt.componentTemplateDetails.cloneActionLabel', {
+ defaultMessage: 'Clone',
+ }),
+ icon: 'copy',
+ handleActionClick: () =>
+ goToCloneComponentTemplate(attemptToDecodeURI(componentTemplateName)),
+ },
+ {
+ name: i18n.translate('xpack.idxMgmt.componentTemplateDetails.deleteButtonLabel', {
+ defaultMessage: 'Delete',
+ }),
+ icon: 'trash',
+ getIsDisabled: (details: ComponentTemplateDeserialized) =>
+ details._kbnMeta.usedBy.length > 0,
+ closePopoverOnClick: true,
+ handleActionClick: () => {
+ setComponentTemplatesToDelete([attemptToDecodeURI(componentTemplateName)]);
+ },
+ },
+ ];
+
+ // Open the flyout with the Component Template Details content
+ addContentToGlobalFlyout({
+ id: 'componentTemplateDetails',
+ Component: ComponentTemplateDetailsFlyoutContent,
+ props: {
+ onClose: goToComponentTemplateList,
+ componentTemplateName,
+ showSummaryCallToAction: true,
+ actions,
+ },
+ flyoutProps: { ...defaultFlyoutProps, onClose: goToComponentTemplateList },
+ });
+ }
+ }, [
+ componentTemplateName,
+ goToComponentTemplateList,
+ goToEditComponentTemplate,
+ goToCloneComponentTemplate,
+ addContentToGlobalFlyout,
+ history,
+ ]);
+
+ useEffect(() => {
+ if (!componentTemplateName) {
+ removeContentFromGlobalFlyout('componentTemplateDetails');
+ }
+ }, [componentTemplateName, removeContentFromGlobalFlyout]);
+
let content: React.ReactNode;
if (isLoading) {
@@ -126,45 +203,6 @@ export const ComponentTemplateList: React.FunctionComponent = ({
componentTemplatesToDelete={componentTemplatesToDelete}
/>
) : null}
-
- {/* details flyout */}
- {componentTemplateName && (
-
- goToEditComponentTemplate(attemptToDecodeURI(componentTemplateName)),
- },
- {
- name: i18n.translate('xpack.idxMgmt.componentTemplateDetails.cloneActionLabel', {
- defaultMessage: 'Clone',
- }),
- icon: 'copy',
- handleActionClick: () =>
- goToCloneComponentTemplate(attemptToDecodeURI(componentTemplateName)),
- },
- {
- name: i18n.translate('xpack.idxMgmt.componentTemplateDetails.deleteButtonLabel', {
- defaultMessage: 'Delete',
- }),
- icon: 'trash',
- getIsDisabled: (details: ComponentTemplateDeserialized) =>
- details._kbnMeta.usedBy.length > 0,
- closePopoverOnClick: true,
- handleActionClick: () => {
- setComponentTemplatesToDelete([attemptToDecodeURI(componentTemplateName)]);
- },
- },
- ]}
- />
- )}
);
};
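The refactor above moves the details flyout out of local JSX and into the global flyout service: content is registered by id while a template name is selected and removed once it clears. The register/unregister pattern in miniature, with placeholder component and props (GlobalFlyout is the es_ui_shared export used above):

const { useGlobalFlyout } = GlobalFlyout;

// Inside a React component; MyDetailsContent, itemName and clearSelection are placeholders.
const { addContent, removeContent } = useGlobalFlyout();

useEffect(() => {
  if (itemName) {
    addContent({
      id: 'myDetailsContent',
      Component: MyDetailsContent,
      props: { itemName, onClose: clearSelection },
      flyoutProps: { onClose: clearSelection },
    });
  } else {
    removeContent('myDetailsContent');
  }
}, [itemName, addContent, removeContent, clearSelection]);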
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.tsx
index 8795c08fd2bee..ed570579d4e45 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.tsx
@@ -11,8 +11,12 @@ import { FormattedMessage } from '@kbn/i18n/react';
import { i18n } from '@kbn/i18n';
import { ComponentTemplateListItem } from '../../../../../common';
-import { SectionError, SectionLoading } from '../shared_imports';
-import { ComponentTemplateDetailsFlyout } from '../component_template_details';
+import { SectionError, SectionLoading, GlobalFlyout } from '../shared_imports';
+import {
+ ComponentTemplateDetailsFlyoutContent,
+ defaultFlyoutProps,
+ ComponentTemplateDetailsProps,
+} from '../component_template_details';
import { CreateButtonPopOver } from './components';
import { ComponentTemplates } from './component_templates';
import { ComponentTemplatesSelection } from './component_templates_selection';
@@ -20,10 +24,12 @@ import { useApi } from '../component_templates_context';
import './component_templates_selector.scss';
+const { useGlobalFlyout } = GlobalFlyout;
+
interface Props {
onChange: (components: string[]) => void;
onComponentsLoaded: (components: ComponentTemplateListItem[]) => void;
- defaultValue: string[];
+ defaultValue?: string[];
docUri: string;
emptyPrompt?: {
text?: string | JSX.Element;
@@ -53,6 +59,10 @@ export const ComponentTemplatesSelector = ({
emptyPrompt: { text, showCreateButton } = {},
}: Props) => {
const { data: components, isLoading, error } = useApi().useLoadComponentTemplates();
+ const {
+ addContent: addContentToGlobalFlyout,
+ removeContent: removeContentFromGlobalFlyout,
+ } = useGlobalFlyout();
const [selectedComponent, setSelectedComponent] = useState(null);
const [componentsSelected, setComponentsSelected] = useState([]);
const isInitialized = useRef(false);
@@ -60,15 +70,20 @@ export const ComponentTemplatesSelector = ({
const hasSelection = Object.keys(componentsSelected).length > 0;
const hasComponents = components && components.length > 0 ? true : false;
+ const closeComponentTemplateDetails = () => {
+ setSelectedComponent(null);
+ };
+
useEffect(() => {
if (components) {
if (
+ defaultValue &&
defaultValue.length > 0 &&
componentsSelected.length === 0 &&
isInitialized.current === false
) {
- // Once the components are loaded we check the ones selected
- // from the defaultValue provided
+      // Once the components are fetched, check which ones were previously
+      // selected via the "defaultValue" prop.
const nextComponentsSelected = defaultValue
.map((name) => components.find((comp) => comp.name === name))
.filter(Boolean) as ComponentTemplateListItem[];
@@ -88,6 +103,30 @@ export const ComponentTemplatesSelector = ({
}
}, [isLoading, error, components, onComponentsLoaded]);
+ useEffect(() => {
+ if (selectedComponent) {
+ // Open the flyout with the Component Template Details content
+ addContentToGlobalFlyout({
+ id: 'componentTemplateDetails',
+ Component: ComponentTemplateDetailsFlyoutContent,
+ props: {
+ onClose: closeComponentTemplateDetails,
+ componentTemplateName: selectedComponent,
+ },
+ flyoutProps: { ...defaultFlyoutProps, onClose: closeComponentTemplateDetails },
+ cleanUpFunc: () => {
+ setSelectedComponent(null);
+ },
+ });
+ }
+ }, [selectedComponent, addContentToGlobalFlyout]);
+
+ useEffect(() => {
+ if (!selectedComponent) {
+ removeContentFromGlobalFlyout('componentTemplateDetails');
+ }
+ }, [selectedComponent, removeContentFromGlobalFlyout]);
+
const onSelectionReorder = (reorderedComponents: ComponentTemplateListItem[]) => {
setComponentsSelected(reorderedComponents);
};
@@ -198,30 +237,12 @@ export const ComponentTemplatesSelector = ({
);
- const renderComponentDetails = () => {
- if (!selectedComponent) {
- return null;
- }
-
- return (
- setSelectedComponent(null)}
- componentTemplateName={selectedComponent}
- />
- );
- };
-
if (isLoading) {
return renderLoading();
} else if (error) {
return renderError();
} else if (hasComponents) {
- return (
- <>
- {renderSelector()}
- {renderComponentDetails()}
-      </>
- );
+ return renderSelector();
}
// No components: render empty prompt
@@ -244,6 +265,7 @@ export const ComponentTemplatesSelector = ({
);
+
return (
{
+ const [templatePreview, setTemplatePreview] = useState('{}');
+
+ const updatePreview = useCallback(async () => {
+ if (!template || Object.keys(template).length === 0) {
+ return;
+ }
+
+ const indexTemplate = serializeTemplate(stripEmptyFields(template) as TemplateDeserialized);
+
+  // Until ES fixes a bug on their side, we send a random index pattern to the simulate API.
+ // Issue: https://github.com/elastic/elasticsearch/issues/59152
+ indexTemplate.index_patterns = [uuid.v4()];
+
+ const { data, error } = await simulateIndexTemplate(indexTemplate);
+
+ if (data) {
+    // "Overlapping" info is only useful when simulating against an index,
+    // which we don't do here.
+ delete data.overlapping;
+ }
+
+ setTemplatePreview(JSON.stringify(data ?? error, null, 2));
+ }, [template]);
+
+ useEffect(() => {
+ updatePreview();
+ }, [updatePreview]);
+
+ return templatePreview === '{}' ? null : (
+
+ {templatePreview}
+
+ );
+});
diff --git a/x-pack/plugins/index_management/public/application/components/index_templates/simulate_template/simulate_template_flyout.tsx b/x-pack/plugins/index_management/public/application/components/index_templates/simulate_template/simulate_template_flyout.tsx
new file mode 100644
index 0000000000000..63bfe78546041
--- /dev/null
+++ b/x-pack/plugins/index_management/public/application/components/index_templates/simulate_template/simulate_template_flyout.tsx
@@ -0,0 +1,119 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+import React, { useState, useCallback, useEffect, useRef } from 'react';
+import { FormattedMessage } from '@kbn/i18n/react';
+import {
+ EuiFlyoutHeader,
+ EuiTitle,
+ EuiFlyoutBody,
+ EuiFlyoutFooter,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiButton,
+ EuiButtonEmpty,
+ EuiTextColor,
+ EuiText,
+ EuiSpacer,
+} from '@elastic/eui';
+
+import { SimulateTemplate } from './simulate_template';
+
+export interface Props {
+ onClose(): void;
+ getTemplate: () => { [key: string]: any };
+}
+
+export const defaultFlyoutProps = {
+ 'data-test-subj': 'simulateTemplateFlyout',
+ 'aria-labelledby': 'simulateTemplateFlyoutTitle',
+};
+
+export const SimulateTemplateFlyoutContent = ({ onClose, getTemplate }: Props) => {
+ const isMounted = useRef(false);
+ const [heightCodeBlock, setHeightCodeBlock] = useState(0);
+ const [template, setTemplate] = useState<{ [key: string]: any }>({});
+
+ useEffect(() => {
+ setHeightCodeBlock(
+ document.getElementsByClassName('euiFlyoutBody__overflow')[0].clientHeight - 96
+ );
+ }, []);
+
+ const updatePreview = useCallback(async () => {
+ const indexTemplate = await getTemplate();
+ setTemplate(indexTemplate);
+ }, [getTemplate]);
+
+ useEffect(() => {
+ if (isMounted.current === false) {
+ updatePreview();
+ }
+ isMounted.current = true;
+ }, [updatePreview]);
+
+ return (
+ <>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    </>
+ );
+};
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/shape_datatype.test.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/shape_datatype.test.tsx
index 311cb37d0b47a..64347d19e9b47 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/shape_datatype.test.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/shape_datatype.test.tsx
@@ -36,8 +36,6 @@ describe('Mappings editor: shape datatype', () => {
test('initial view and default parameters values', async () => {
const defaultMappings = {
- _meta: {},
- _source: {},
properties: {
myField: {
type: 'shape',
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/text_datatype.test.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/text_datatype.test.tsx
index ed60414d198f1..c03aa4805d27f 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/text_datatype.test.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/text_datatype.test.tsx
@@ -47,8 +47,6 @@ describe.skip('Mappings editor: text datatype', () => {
test('initial view and default parameters values', async () => {
const defaultMappings = {
- _meta: {},
- _source: {},
properties: {
myField: {
type: 'text',
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/edit_field.test.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/edit_field.test.tsx
index 4f9d8a960a1a2..c146c7704911f 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/edit_field.test.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/edit_field.test.tsx
@@ -65,8 +65,6 @@ describe('Mappings editor: edit field', () => {
test('should update form parameters when changing the field datatype', async () => {
const defaultMappings = {
- _meta: {},
- _source: {},
properties: {
userName: {
...defaultTextParameters,
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/helpers/mappings_editor.helpers.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/helpers/mappings_editor.helpers.tsx
index 638bbfd925ffb..a6558b28a1273 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/helpers/mappings_editor.helpers.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/helpers/mappings_editor.helpers.tsx
@@ -7,9 +7,11 @@ import React from 'react';
import { act } from 'react-dom/test-utils';
import { ReactWrapper } from 'enzyme';
+import { GlobalFlyout } from '../../../../../../../../../../src/plugins/es_ui_shared/public';
import { registerTestBed, TestBed } from '../../../../../../../../../test_utils';
import { getChildFieldsName } from '../../../lib';
import { MappingsEditor } from '../../../mappings_editor';
+import { MappingsEditorProvider } from '../../../mappings_editor_context';
jest.mock('@elastic/eui', () => {
const original = jest.requireActual('@elastic/eui');
@@ -51,6 +53,8 @@ jest.mock('@elastic/eui', () => {
};
});
+const { GlobalFlyoutProvider } = GlobalFlyout;
+
export interface DomFields {
[key: string]: {
type: string;
@@ -247,7 +251,15 @@ const createActions = (testBed: TestBed) => {
};
export const setup = (props: any = { onUpdate() {} }): MappingsEditorTestBed => {
- const setupTestBed = registerTestBed(MappingsEditor, {
+ const ComponentToTest = (propsOverride: { [key: string]: any }) => (
+
+
+
+
+
+ );
+
+ const setupTestBed = registerTestBed(ComponentToTest, {
memoryRouter: {
wrapComponent: false,
},
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form.tsx
index 86bcc796a88eb..20b2e11855029 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form.tsx
@@ -7,16 +7,14 @@ import React, { useEffect, useRef } from 'react';
import { EuiSpacer } from '@elastic/eui';
import { useForm, Form, SerializerFunc } from '../../shared_imports';
-import { GenericObject } from '../../types';
-import { Types, useDispatch } from '../../mappings_state';
+import { GenericObject, MappingsConfiguration } from '../../types';
+import { useDispatch } from '../../mappings_state_context';
import { DynamicMappingSection } from './dynamic_mapping_section';
import { SourceFieldSection } from './source_field_section';
import { MetaFieldSection } from './meta_field_section';
import { RoutingSection } from './routing_section';
import { configurationFormSchema } from './configuration_form_schema';
-type MappingsConfiguration = Types['MappingsConfiguration'];
-
interface Props {
value?: MappingsConfiguration;
}
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form_schema.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form_schema.tsx
index 6e80f8b813ec2..8742dfc916924 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form_schema.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form_schema.tsx
@@ -11,8 +11,7 @@ import { EuiLink, EuiCode } from '@elastic/eui';
import { documentationService } from '../../../../services/documentation';
import { FormSchema, FIELD_TYPES, VALIDATION_TYPES, fieldValidators } from '../../shared_imports';
-import { MappingsConfiguration } from '../../reducer';
-import { ComboBoxOption } from '../../types';
+import { ComboBoxOption, MappingsConfiguration } from '../../types';
const { containsCharsField, isJsonField } = fieldValidators;
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/document_fields.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/document_fields.tsx
index 400de4052afa4..4b19b6f7ae5c3 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/document_fields.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/document_fields.tsx
@@ -6,7 +6,7 @@
import React, { useMemo, useCallback } from 'react';
import { EuiSpacer } from '@elastic/eui';
-import { useMappingsState, useDispatch } from '../../mappings_state';
+import { useMappingsState, useDispatch } from '../../mappings_state_context';
import { deNormalize } from '../../lib';
import { EditFieldContainer } from './fields';
import { DocumentFieldsHeader } from './document_fields_header';
@@ -18,7 +18,7 @@ export const DocumentFields = React.memo(() => {
const { fields, search, documentFields } = useMappingsState();
const dispatch = useDispatch();
- const { status, fieldToEdit, editor: editorType } = documentFields;
+ const { editor: editorType } = documentFields;
const jsonEditorDefaultValue = useMemo(() => {
if (editorType === 'json') {
@@ -33,14 +33,6 @@ export const DocumentFields = React.memo(() => {
);
- const renderEditField = () => {
- if (status !== 'editingField') {
- return null;
- }
- const field = fields.byId[fieldToEdit!];
- return ;
- };
-
const onSearchChange = useCallback(
(value: string) => {
dispatch({ type: 'search:update', value });
@@ -59,7 +51,7 @@ export const DocumentFields = React.memo(() => {
) : (
editor
)}
- {renderEditField()}
+      <EditFieldContainer />
);
});
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/editor_toggle_controls.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/editor_toggle_controls.tsx
index 51f9ca63be403..ad283a3fe47bd 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/editor_toggle_controls.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/editor_toggle_controls.tsx
@@ -7,7 +7,7 @@
import React from 'react';
import { EuiButton, EuiText } from '@elastic/eui';
-import { useDispatch, useMappingsState } from '../../mappings_state';
+import { useDispatch, useMappingsState } from '../../mappings_state_context';
import { FieldsEditor } from '../../types';
import { canUseMappingsEditor, normalize } from '../../lib';
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/name_parameter.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/name_parameter.tsx
index 01cca7e249a23..0320f2ff51da3 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/name_parameter.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/name_parameter.tsx
@@ -9,7 +9,7 @@ import React from 'react';
import { TextField, UseField, FieldConfig } from '../../../shared_imports';
import { validateUniqueName } from '../../../lib';
import { PARAMETERS_DEFINITION } from '../../../constants';
-import { useMappingsState } from '../../../mappings_state';
+import { useMappingsState } from '../../../mappings_state_context';
export const NameParameter = () => {
const {
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/type_parameter.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/type_parameter.tsx
index 46e70bf8e56ba..31ae37c82a43e 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/type_parameter.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/type_parameter.tsx
@@ -70,7 +70,13 @@ export const TypeParameter = ({ isMultiField, isRootLevelField, showDocLink = fa
: filterTypesForNonRootFields(FIELD_TYPES_OPTIONS)
}
selectedOptions={typeField.value}
- onChange={typeField.setValue}
+ onChange={(value) => {
+ if (value.length === 0) {
+ // Don't allow clearing the type. One must always be selected
+ return;
+ }
+ typeField.setValue(value);
+ }}
isClearable={false}
data-test-subj="fieldType"
/>
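The guarded onChange above keeps the field type combo box from ever being cleared. As a minimal standalone sketch of the same guard (names are hypothetical and not part of this patch):

    // Hypothetical helper mirroring the guard: ignore empty selections so the
    // field type combo box always keeps exactly one value selected.
    const guardAgainstEmptySelection = <T,>(setValue: (value: T[]) => void) => (
      value: T[]
    ): void => {
      if (value.length === 0) {
        // Keep the previous selection instead of clearing the type.
        return;
      }
      setValue(value);
    };

    // Usage (hypothetical): onChange={guardAgainstEmptySelection(typeField.setValue)}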
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx
index 57a765c38dd26..dc631b7dbf32d 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx
@@ -18,7 +18,7 @@ import {
import { useForm, Form, FormDataProvider } from '../../../../shared_imports';
import { EUI_SIZE } from '../../../../constants';
-import { useDispatch } from '../../../../mappings_state';
+import { useDispatch } from '../../../../mappings_state_context';
import { fieldSerializer } from '../../../../lib';
import { Field, NormalizedFields } from '../../../../types';
import { NameParameter, TypeParameter, SubTypeParameter } from '../../field_parameters';
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/delete_field_provider.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/delete_field_provider.tsx
index 80e3e9bec605a..2a98b5948e5a9 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/delete_field_provider.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/delete_field_provider.tsx
@@ -7,7 +7,7 @@
import React, { useState } from 'react';
import { i18n } from '@kbn/i18n';
-import { useMappingsState, useDispatch } from '../../../mappings_state';
+import { useMappingsState, useDispatch } from '../../../mappings_state_context';
import { NormalizedField } from '../../../types';
import { getAllDescendantAliases } from '../../../lib';
import { ModalConfirmationDeleteFields } from './modal_confirmation_delete_fields';
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field.tsx
index e8e41955a5e80..e6950ccfe253e 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field.tsx
@@ -6,7 +6,6 @@
import React from 'react';
import { i18n } from '@kbn/i18n';
import {
- EuiFlyout,
EuiFlyoutHeader,
EuiFlyoutBody,
EuiFlyoutFooter,
@@ -25,7 +24,7 @@ import { TYPE_DEFINITION } from '../../../../constants';
import { Field, NormalizedField, NormalizedFields, MainType, SubType } from '../../../../types';
import { CodeBlock } from '../../../code_block';
import { getParametersFormForType } from '../field_types';
-import { UpdateFieldProvider, UpdateFieldFunc } from './update_field_provider';
+import { UpdateFieldFunc } from './use_update_field';
import { EditFieldHeaderForm } from './edit_field_header_form';
const limitStringLength = (text: string, limit = 18): string => {
@@ -36,19 +35,28 @@ const limitStringLength = (text: string, limit = 18): string => {
return `...${text.substr(limit * -1)}`;
};
-interface Props {
+export interface Props {
form: FormHook;
field: NormalizedField;
allFields: NormalizedFields['byId'];
exitEdit(): void;
+ updateField: UpdateFieldFunc;
}
-export const EditField = React.memo(({ form, field, allFields, exitEdit }: Props) => {
- const getSubmitForm = (updateField: UpdateFieldFunc) => async (e?: React.FormEvent) => {
- if (e) {
- e.preventDefault();
- }
+export const defaultFlyoutProps = {
+ 'data-test-subj': 'mappingsEditorFieldEdit',
+ 'aria-labelledby': 'mappingsEditorFieldEditTitle',
+ className: 'mappingsEditor__editField',
+ maxWidth: 720,
+};
+
+// The default FormWrapper wraps the form with a div. We can't have a div as the
+// first child of the flyout as it breaks the height calculation and does not
+// render the footer position correctly.
+const FormWrapper: React.FC = ({ children }) => <>{children}</>;
+export const EditField = React.memo(({ form, field, allFields, exitEdit, updateField }: Props) => {
+ const submitForm = async () => {
const { isValid, data } = await form.submit();
if (isValid) {
@@ -56,174 +64,152 @@ export const EditField = React.memo(({ form, field, allFields, exitEdit }: Props
}
};
- const cancel = () => {
- exitEdit();
- };
-
const { isMultiField } = field;
return (
-
- {(updateField) => (
-
- )}
-
+ );
+ }}
+
+
+
+ {/* Field path */}
+
+
+ {field.path.join(' > ')}
+
+
+
+
+
+
+
+
+ {({ type, subType }) => {
+ const ParametersForm = getParametersFormForType(type, subType);
+
+ if (!ParametersForm) {
+ return null;
+ }
+
+ return (
+
+ );
+ }}
+
+
+
+
+ {form.isSubmitted && !form.isValid && (
+ <>
+
+
+ >
+ )}
+
+
+
+
+ {i18n.translate('xpack.idxMgmt.mappingsEditor.editFieldCancelButtonLabel', {
+ defaultMessage: 'Cancel',
+ })}
+
+
+
+
+ {i18n.translate('xpack.idxMgmt.mappingsEditor.editFieldUpdateButtonLabel', {
+ defaultMessage: 'Update',
+ })}
+
+
+
+
+
);
});
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_container.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_container.tsx
index 5105a2a157a6d..4996f59105c04 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_container.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_container.tsx
@@ -3,24 +3,38 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import React, { useEffect, useCallback } from 'react';
+import React, { useEffect, useCallback, useMemo } from 'react';
-import { useForm } from '../../../../shared_imports';
-import { useDispatch } from '../../../../mappings_state';
-import { Field, NormalizedField, NormalizedFields } from '../../../../types';
+import { useForm, GlobalFlyout } from '../../../../shared_imports';
+import { useDispatch, useMappingsState } from '../../../../mappings_state_context';
+import { Field } from '../../../../types';
import { fieldSerializer, fieldDeserializer } from '../../../../lib';
-import { EditField } from './edit_field';
+import { ModalConfirmationDeleteFields } from '../modal_confirmation_delete_fields';
+import { EditField, defaultFlyoutProps, Props as EditFieldProps } from './edit_field';
+import { useUpdateField } from './use_update_field';
-interface Props {
- field: NormalizedField;
- allFields: NormalizedFields['byId'];
-}
+const { useGlobalFlyout } = GlobalFlyout;
-export const EditFieldContainer = React.memo(({ field, allFields }: Props) => {
+export const EditFieldContainer = React.memo(() => {
+ const { fields, documentFields } = useMappingsState();
const dispatch = useDispatch();
+ const {
+ addContent: addContentToGlobalFlyout,
+ removeContent: removeContentFromGlobalFlyout,
+ } = useGlobalFlyout();
+ const { updateField, modal } = useUpdateField();
+
+ const { status, fieldToEdit } = documentFields;
+ const isEditing = status === 'editingField';
+
+ const field = isEditing ? fields.byId[fieldToEdit!] : undefined;
+
+ const formDefaultValue = useMemo(() => {
+ return { ...field?.source };
+ }, [field?.source]);
  const { form } = useForm<Field>({
- defaultValue: { ...field.source },
+ defaultValue: formDefaultValue,
serializer: fieldSerializer,
deserializer: fieldDeserializer,
options: { stripEmptyFields: false },
@@ -40,5 +54,48 @@ export const EditFieldContainer = React.memo(({ field, allFields }: Props) => {
dispatch({ type: 'documentField.changeStatus', value: 'idle' });
}, [dispatch]);
- return ;
+ useEffect(() => {
+ if (isEditing) {
+ // Open the flyout with the content
+ addContentToGlobalFlyout({
+ id: 'mappingsEditField',
+ Component: EditField,
+ props: {
+ form,
+ field: field!,
+ exitEdit,
+ allFields: fields.byId,
+ updateField,
+ },
+ flyoutProps: { ...defaultFlyoutProps, onClose: exitEdit },
+ cleanUpFunc: exitEdit,
+ });
+ }
+ }, [
+ isEditing,
+ field,
+ form,
+ addContentToGlobalFlyout,
+ fields.byId,
+ fieldToEdit,
+ exitEdit,
+ updateField,
+ ]);
+
+ useEffect(() => {
+ if (!isEditing) {
+ removeContentFromGlobalFlyout('mappingsEditField');
+ }
+ }, [isEditing, removeContentFromGlobalFlyout]);
+
+ useEffect(() => {
+ return () => {
+ if (isEditing) {
+ // When the component unmounts, exit edit mode.
+ exitEdit();
+ }
+ };
+ }, [isEditing, exitEdit]);
+
+  return modal.isOpen ? <ModalConfirmationDeleteFields {...modal.props} /> : null;
});
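The container now drives the edit form through the shared GlobalFlyout service instead of rendering a flyout locally. A rough usage sketch of that registration pattern, assuming only the addContent/removeContent API visible above (component name, id, and import path are illustrative):

    import React, { useEffect } from 'react';
    import { GlobalFlyout } from '../../../../shared_imports';

    const { useGlobalFlyout } = GlobalFlyout;

    // Hypothetical consumer: register content while `isOpen` is true, remove it otherwise.
    export const ExampleFlyoutConsumer = ({ isOpen }: { isOpen: boolean }) => {
      const { addContent, removeContent } = useGlobalFlyout();

      useEffect(() => {
        if (isOpen) {
          addContent({
            id: 'exampleContent',
            Component: () => <p>Flyout body</p>,
            flyoutProps: { onClose: () => {} },
          });
        } else {
          removeContent('exampleContent');
        }
      }, [isOpen, addContent, removeContent]);

      return null;
    };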
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/update_field_provider.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/update_field_provider.tsx
deleted file mode 100644
index e31d12689e7e0..0000000000000
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/update_field_provider.tsx
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import React, { useState } from 'react';
-import { i18n } from '@kbn/i18n';
-
-import { useMappingsState, useDispatch } from '../../../../mappings_state';
-import { shouldDeleteChildFieldsAfterTypeChange, getAllDescendantAliases } from '../../../../lib';
-import { NormalizedField, DataType } from '../../../../types';
-import { PARAMETERS_DEFINITION } from '../../../../constants';
-import { ModalConfirmationDeleteFields } from '../modal_confirmation_delete_fields';
-
-export type UpdateFieldFunc = (field: NormalizedField) => void;
-
-interface Props {
- children: (saveProperty: UpdateFieldFunc) => React.ReactNode;
-}
-
-interface State {
- isModalOpen: boolean;
- field?: NormalizedField;
- aliases?: string[];
-}
-
-export const UpdateFieldProvider = ({ children }: Props) => {
- const [state, setState] = useState({
- isModalOpen: false,
- });
- const dispatch = useDispatch();
-
- const { fields } = useMappingsState();
- const { byId, aliases } = fields;
-
- const confirmButtonText = i18n.translate(
- 'xpack.idxMgmt.mappingsEditor.updateField.confirmationModal.confirmDescription',
- {
- defaultMessage: 'Confirm type change',
- }
- );
-
- let modalTitle: string | undefined;
-
- if (state.field) {
- const { source } = state.field;
-
- modalTitle = i18n.translate(
- 'xpack.idxMgmt.mappingsEditor.updateField.confirmationModal.title',
- {
- defaultMessage: "Confirm change '{fieldName}' type to '{fieldType}'.",
- values: {
- fieldName: source.name,
- fieldType: source.type,
- },
- }
- );
- }
-
- const closeModal = () => {
- setState({ isModalOpen: false });
- };
-
- const updateField: UpdateFieldFunc = (field) => {
- const previousField = byId[field.id];
-
- const willDeleteChildFields = (oldType: DataType, newType: DataType): boolean => {
- const { hasChildFields, hasMultiFields } = field;
-
- if (!hasChildFields && !hasMultiFields) {
- // No child or multi-fields will be deleted, no confirmation needed.
- return false;
- }
-
- return shouldDeleteChildFieldsAfterTypeChange(oldType, newType);
- };
-
- if (field.source.type !== previousField.source.type) {
- // Array of all the aliases pointing to the current field beeing updated
- const aliasesOnField = aliases[field.id] || [];
-
- // Array of all the aliases pointing to the current field + all its possible children
- const aliasesOnFieldAndDescendants = getAllDescendantAliases(field, fields);
-
- const isReferencedByAlias = aliasesOnField && Boolean(aliasesOnField.length);
- const nextTypeCanHaveAlias = !PARAMETERS_DEFINITION.path.targetTypesNotAllowed.includes(
- field.source.type
- );
-
- // We need to check if, by changing the type, we will also
- // delete possible child properties ("fields" or "properties").
- // If we will, we need to warn the user about it.
- let requiresConfirmation: boolean;
- let aliasesToDelete: string[] = [];
-
- if (isReferencedByAlias && !nextTypeCanHaveAlias) {
- aliasesToDelete = aliasesOnFieldAndDescendants;
- requiresConfirmation = true;
- } else {
- requiresConfirmation = willDeleteChildFields(previousField.source.type, field.source.type);
- if (requiresConfirmation) {
- aliasesToDelete = aliasesOnFieldAndDescendants.filter(
- // We will only delete aliases that points to possible children, *NOT* the field itself
- (id) => aliasesOnField.includes(id) === false
- );
- }
- }
-
- if (requiresConfirmation) {
- setState({
- isModalOpen: true,
- field,
- aliases: Boolean(aliasesToDelete.length)
- ? aliasesToDelete.map((id) => byId[id].path.join(' > ')).sort()
- : undefined,
- });
- return;
- }
- }
-
- dispatch({ type: 'field.edit', value: field.source });
- };
-
- const confirmTypeUpdate = () => {
- dispatch({ type: 'field.edit', value: state.field!.source });
- closeModal();
- };
-
- return (
- <>
- {children(updateField)}
-
- {state.isModalOpen && (
-
- )}
- >
- );
-};
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/use_update_field.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/use_update_field.ts
new file mode 100644
index 0000000000000..ed659cd05b060
--- /dev/null
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/use_update_field.ts
@@ -0,0 +1,146 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { useState, useCallback } from 'react';
+import { i18n } from '@kbn/i18n';
+
+import { useMappingsState, useDispatch } from '../../../../mappings_state_context';
+import { shouldDeleteChildFieldsAfterTypeChange, getAllDescendantAliases } from '../../../../lib';
+import { NormalizedField, DataType } from '../../../../types';
+import { PARAMETERS_DEFINITION } from '../../../../constants';
+
+export type UpdateFieldFunc = (field: NormalizedField) => void;
+
+interface State {
+ isModalOpen: boolean;
+ field?: NormalizedField;
+ aliases?: string[];
+}
+
+export const useUpdateField = () => {
+ const [state, setState] = useState({
+ isModalOpen: false,
+ });
+ const dispatch = useDispatch();
+
+ const { fields } = useMappingsState();
+ const { byId, aliases } = fields;
+
+ const confirmButtonText = i18n.translate(
+ 'xpack.idxMgmt.mappingsEditor.updateField.confirmationModal.confirmDescription',
+ {
+ defaultMessage: 'Confirm type change',
+ }
+ );
+
+ let modalTitle = '';
+
+ if (state.field) {
+ const { source } = state.field;
+
+ modalTitle = i18n.translate(
+ 'xpack.idxMgmt.mappingsEditor.updateField.confirmationModal.title',
+ {
+ defaultMessage: "Confirm change '{fieldName}' type to '{fieldType}'.",
+ values: {
+ fieldName: source.name,
+ fieldType: source.type,
+ },
+ }
+ );
+ }
+
+ const closeModal = () => {
+ setState({ isModalOpen: false });
+ };
+
+ const updateField: UpdateFieldFunc = useCallback(
+ (field) => {
+ const previousField = byId[field.id];
+
+ const willDeleteChildFields = (oldType: DataType, newType: DataType): boolean => {
+ const { hasChildFields, hasMultiFields } = field;
+
+ if (!hasChildFields && !hasMultiFields) {
+ // No child or multi-fields will be deleted, no confirmation needed.
+ return false;
+ }
+
+ return shouldDeleteChildFieldsAfterTypeChange(oldType, newType);
+ };
+
+ if (field.source.type !== previousField.source.type) {
+      // Array of all the aliases pointing to the current field being updated
+ const aliasesOnField = aliases[field.id] || [];
+
+ // Array of all the aliases pointing to the current field + all its possible children
+ const aliasesOnFieldAndDescendants = getAllDescendantAliases(field, fields);
+
+ const isReferencedByAlias = aliasesOnField && Boolean(aliasesOnField.length);
+ const nextTypeCanHaveAlias = !PARAMETERS_DEFINITION.path.targetTypesNotAllowed.includes(
+ field.source.type
+ );
+
+ // We need to check if, by changing the type, we will also
+ // delete possible child properties ("fields" or "properties").
+ // If we will, we need to warn the user about it.
+ let requiresConfirmation: boolean;
+ let aliasesToDelete: string[] = [];
+
+ if (isReferencedByAlias && !nextTypeCanHaveAlias) {
+ aliasesToDelete = aliasesOnFieldAndDescendants;
+ requiresConfirmation = true;
+ } else {
+ requiresConfirmation = willDeleteChildFields(
+ previousField.source.type,
+ field.source.type
+ );
+ if (requiresConfirmation) {
+ aliasesToDelete = aliasesOnFieldAndDescendants.filter(
+            // We will only delete aliases that point to possible children, *NOT* the field itself
+ (id) => aliasesOnField.includes(id) === false
+ );
+ }
+ }
+
+ if (requiresConfirmation) {
+ setState({
+ isModalOpen: true,
+ field,
+ aliases: Boolean(aliasesToDelete.length)
+ ? aliasesToDelete.map((id) => byId[id].path.join(' > ')).sort()
+ : undefined,
+ });
+ return;
+ }
+ }
+
+ dispatch({ type: 'field.edit', value: field.source });
+ },
+ [dispatch, aliases, fields, byId]
+ );
+
+ const confirmTypeUpdate = () => {
+ dispatch({ type: 'field.edit', value: state.field!.source });
+ closeModal();
+ };
+
+ return {
+ updateField,
+ modal: {
+ isOpen: state.isModalOpen,
+ props: {
+ childFields: state.field && state.field.childFields,
+ title: modalTitle,
+ aliases: state.aliases,
+ byId,
+ confirmButtonText,
+ onConfirm: confirmTypeUpdate,
+ onCancel: closeModal,
+ },
+ },
+ };
+};
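useUpdateField replaces the render-prop UpdateFieldProvider with a hook returning the update callback plus the confirmation modal state. A hedged usage sketch (the consumer component and relative import paths are illustrative, not part of the patch):

    import React from 'react';

    import { useUpdateField } from './use_update_field';
    import { ModalConfirmationDeleteFields } from '../modal_confirmation_delete_fields';
    import { NormalizedField } from '../../../../types';

    // Hypothetical consumer: save the edited field and render the confirmation
    // modal whenever the hook flags that the type change needs confirmation.
    export const ExampleFieldSaver = ({ editedField }: { editedField: NormalizedField }) => {
      const { updateField, modal } = useUpdateField();

      return (
        <>
          <button onClick={() => updateField(editedField)}>Save field</button>
          {modal.isOpen && <ModalConfirmationDeleteFields {...modal.props} />}
        </>
      );
    };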
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item_container.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item_container.tsx
index 55093e606cfa1..7d9ad3bc6aaec 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item_container.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item_container.tsx
@@ -5,7 +5,7 @@
*/
import React, { useMemo, useCallback, useRef } from 'react';
-import { useMappingsState, useDispatch } from '../../../mappings_state';
+import { useMappingsState, useDispatch } from '../../../mappings_state_context';
import { NormalizedField } from '../../../types';
import { FieldsListItem } from './fields_list_item';
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_json_editor.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_json_editor.tsx
index 5954f6f285f10..d750c0e604c5e 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_json_editor.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_json_editor.tsx
@@ -6,7 +6,7 @@
import React, { useRef, useCallback } from 'react';
-import { useDispatch } from '../../mappings_state';
+import { useDispatch } from '../../mappings_state_context';
import { JsonEditor } from '../../shared_imports';
export interface Props {
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_tree_editor.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_tree_editor.tsx
index 9d9df38ef4e25..7a0b72ae647d5 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_tree_editor.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_tree_editor.tsx
@@ -8,7 +8,7 @@ import React, { useMemo, useCallback } from 'react';
import { EuiButtonEmpty, EuiSpacer } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
-import { useMappingsState, useDispatch } from '../../mappings_state';
+import { useMappingsState, useDispatch } from '../../mappings_state_context';
import { FieldsList, CreateField } from './fields';
export const DocumentFieldsTreeEditor = () => {
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result.tsx
index 9077781b7fb43..f3602a800eeeb 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result.tsx
@@ -8,9 +8,8 @@ import VirtualList from 'react-tiny-virtual-list';
import { EuiEmptyPrompt, EuiButton } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
-import { SearchResult as SearchResultType } from '../../../types';
-import { useDispatch } from '../../../mappings_state';
-import { State } from '../../../reducer';
+import { SearchResult as SearchResultType, State } from '../../../types';
+import { useDispatch } from '../../../mappings_state_context';
import { SearchResultItem } from './search_result_item';
interface Props {
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx
index ab8b90b6be3b5..73d3e078f6ff3 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx
@@ -10,7 +10,7 @@ import { i18n } from '@kbn/i18n';
import { SearchResult } from '../../../types';
import { TYPE_DEFINITION } from '../../../constants';
-import { useDispatch } from '../../../mappings_state';
+import { useDispatch } from '../../../mappings_state_context';
import { getTypeLabelFromType } from '../../../lib';
import { DeleteFieldProvider } from '../fields/delete_field_provider';
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/load_mappings/index.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/load_mappings/index.ts
index 34c410f06e520..dc7f20f4d026b 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/load_mappings/index.ts
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/load_mappings/index.ts
@@ -4,5 +4,5 @@
* you may not use this file except in compliance with the Elastic License.
*/
-export * from './load_from_json_button';
-export * from './load_mappings_provider';
+export { LoadMappingsFromJsonButton } from './load_from_json_button';
+export { LoadMappingsProvider } from './load_mappings_provider';
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form.tsx
index a95579a8a141e..44a809a7a01bf 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form.tsx
@@ -9,12 +9,11 @@ import { FormattedMessage } from '@kbn/i18n/react';
import { EuiText, EuiLink, EuiSpacer } from '@elastic/eui';
import { useForm, Form, SerializerFunc, UseField, JsonEditorField } from '../../shared_imports';
-import { Types, useDispatch } from '../../mappings_state';
+import { MappingsTemplates } from '../../types';
+import { useDispatch } from '../../mappings_state_context';
import { templatesFormSchema } from './templates_form_schema';
import { documentationService } from '../../../../services/documentation';
-type MappingsTemplates = Types['MappingsTemplates'];
-
interface Props {
value?: MappingsTemplates;
}
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form_schema.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form_schema.ts
index 667b5685723d2..daca85f95b0b9 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form_schema.ts
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form_schema.ts
@@ -7,7 +7,7 @@
import { i18n } from '@kbn/i18n';
import { FormSchema, fieldValidators } from '../../shared_imports';
-import { MappingsTemplates } from '../../reducer';
+import { MappingsTemplates } from '../../types';
const { isJsonField } = fieldValidators;
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/index.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/index.ts
index 29cfaf99c6559..00bb41663dd9c 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/index.ts
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/index.ts
@@ -4,12 +4,12 @@
* you may not use this file except in compliance with the Elastic License.
*/
-export * from './mappings_editor';
+export { MappingsEditor } from './mappings_editor';
// We export both the button & the load mappings provider
// to give flexibility to the consumer
-export * from './components/load_mappings';
+export { LoadMappingsFromJsonButton, LoadMappingsProvider } from './components/load_mappings';
-export { OnUpdateHandler, Types } from './mappings_state';
+export { MappingsEditorProvider } from './mappings_editor_context';
-export { IndexSettings } from './types';
+export { IndexSettings, OnUpdateHandler } from './types';
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/index_settings_context.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/index_settings_context.tsx
index 9e3637f970293..411193f10b24a 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/index_settings_context.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/index_settings_context.tsx
@@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import React, { createContext, useContext } from 'react';
+
import { IndexSettings } from './types';
const IndexSettingsContext = createContext(undefined);
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor.tsx
index e8fda90737708..292882f1c5b4b 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor.tsx
@@ -14,24 +14,40 @@ import {
TemplatesForm,
MultipleMappingsWarning,
} from './components';
-import { IndexSettings } from './types';
+import {
+ OnUpdateHandler,
+ IndexSettings,
+ Field,
+ Mappings,
+ MappingsConfiguration,
+ MappingsTemplates,
+} from './types';
import { extractMappingsDefinition } from './lib';
-import { State } from './reducer';
-import { MappingsState, Props as MappingsStateProps, Types } from './mappings_state';
+import { useMappingsState } from './mappings_state_context';
+import { useMappingsStateListener } from './use_state_listener';
import { IndexSettingsProvider } from './index_settings_context';
+type TabName = 'fields' | 'advanced' | 'templates';
+
+interface MappingsEditorParsedMetadata {
+ parsedDefaultValue?: {
+ configuration: MappingsConfiguration;
+ fields: { [key: string]: Field };
+ templates: MappingsTemplates;
+ };
+ multipleMappingsDeclared: boolean;
+}
+
interface Props {
- onChange: MappingsStateProps['onChange'];
+ onChange: OnUpdateHandler;
value?: { [key: string]: any };
indexSettings?: IndexSettings;
}
-type TabName = 'fields' | 'advanced' | 'templates';
-
export const MappingsEditor = React.memo(({ onChange, value, indexSettings }: Props) => {
- const [selectedTab, selectTab] = useState('fields');
-
- const { parsedDefaultValue, multipleMappingsDeclared } = useMemo(() => {
+ const { parsedDefaultValue, multipleMappingsDeclared } = useMemo<
+ MappingsEditorParsedMetadata
+ >(() => {
const mappingsDefinition = extractMappingsDefinition(value);
if (mappingsDefinition === null) {
@@ -69,18 +85,28 @@ export const MappingsEditor = React.memo(({ onChange, value, indexSettings }: Pr
return { parsedDefaultValue: parsed, multipleMappingsDeclared: false };
}, [value]);
+ /**
+ * Hook that will listen to:
+ * 1. "value" prop changes in order to reset the mappings editor
+ * 2. "state" changes in order to communicate any updates to the consumer
+ */
+ useMappingsStateListener({ onChange, value: parsedDefaultValue });
+
+ const state = useMappingsState();
+ const [selectedTab, selectTab] = useState('fields');
+
useEffect(() => {
if (multipleMappingsDeclared) {
// We set the data getter here as the user won't be able to make any changes
onChange({
- getData: () => value! as Types['Mappings'],
+ getData: () => value! as Mappings,
validate: () => Promise.resolve(true),
isValid: true,
});
}
}, [multipleMappingsDeclared, onChange, value]);
- const changeTab = async (tab: TabName, state: State) => {
+ const changeTab = async (tab: TabName) => {
if (selectedTab === 'advanced') {
// When we navigate away we need to submit the form to validate if there are any errors.
const { isValid: isConfigurationFormValid } = await state.configuration.submitForm!();
@@ -102,59 +128,53 @@ export const MappingsEditor = React.memo(({ onChange, value, indexSettings }: Pr
selectTab(tab);
};
+ const tabToContentMap = {
+ fields: ,
+ templates: ,
+ advanced: ,
+ };
+
return (
{multipleMappingsDeclared ? (
) : (
-
- {({ state }) => {
- const tabToContentMap = {
- fields: ,
- templates: ,
- advanced: ,
- };
-
- return (
-
-
- changeTab('fields', state)}
- isSelected={selectedTab === 'fields'}
- data-test-subj="formTab"
- >
- {i18n.translate('xpack.idxMgmt.mappingsEditor.fieldsTabLabel', {
- defaultMessage: 'Mapped fields',
- })}
-
- changeTab('templates', state)}
- isSelected={selectedTab === 'templates'}
- data-test-subj="formTab"
- >
- {i18n.translate('xpack.idxMgmt.mappingsEditor.templatesTabLabel', {
- defaultMessage: 'Dynamic templates',
- })}
-
- changeTab('advanced', state)}
- isSelected={selectedTab === 'advanced'}
- data-test-subj="formTab"
- >
- {i18n.translate('xpack.idxMgmt.mappingsEditor.advancedTabLabel', {
- defaultMessage: 'Advanced options',
- })}
-
-
-
-
-
- {tabToContentMap[selectedTab]}
-
- );
- }}
-
+
+
+ changeTab('fields')}
+ isSelected={selectedTab === 'fields'}
+ data-test-subj="formTab"
+ >
+ {i18n.translate('xpack.idxMgmt.mappingsEditor.fieldsTabLabel', {
+ defaultMessage: 'Mapped fields',
+ })}
+
+ changeTab('templates')}
+ isSelected={selectedTab === 'templates'}
+ data-test-subj="formTab"
+ >
+ {i18n.translate('xpack.idxMgmt.mappingsEditor.templatesTabLabel', {
+ defaultMessage: 'Dynamic templates',
+ })}
+
+ changeTab('advanced')}
+ isSelected={selectedTab === 'advanced'}
+ data-test-subj="formTab"
+ >
+ {i18n.translate('xpack.idxMgmt.mappingsEditor.advancedTabLabel', {
+ defaultMessage: 'Advanced options',
+ })}
+
+
+
+
+
+ {tabToContentMap[selectedTab]}
+
)}
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor_context.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor_context.tsx
new file mode 100644
index 0000000000000..596b49cc89ee8
--- /dev/null
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor_context.tsx
@@ -0,0 +1,12 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+import React from 'react';
+
+import { StateProvider } from './mappings_state_context';
+
+export const MappingsEditorProvider: React.FC = ({ children }) => {
+  return <StateProvider>{children}</StateProvider>;
+};
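MappingsEditorProvider is the new public wrapper that consumers mount around the editor so the mappings state context is available below it. A minimal sketch, assuming the exports listed in index.ts (the page component and handler are hypothetical):

    import React from 'react';

    import { MappingsEditor, MappingsEditorProvider, OnUpdateHandler } from './index';

    // Hypothetical page: the provider wraps the editor (and anything else that
    // needs the mappings state, e.g. the globally rendered edit flyout).
    export const ExampleMappingsPage = ({ onChange }: { onChange: OnUpdateHandler }) => (
      <MappingsEditorProvider>
        <MappingsEditor onChange={onChange} value={{}} />
      </MappingsEditorProvider>
    );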
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state_context.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state_context.tsx
new file mode 100644
index 0000000000000..a402dec250056
--- /dev/null
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state_context.tsx
@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import React, { useReducer, createContext, useContext } from 'react';
+
+import { reducer } from './reducer';
+import { State, Dispatch } from './types';
+
+const StateContext = createContext<State | undefined>(undefined);
+const DispatchContext = createContext<Dispatch | undefined>(undefined);
+
+export const StateProvider: React.FC = ({ children }) => {
+ const initialState: State = {
+ isValid: true,
+ configuration: {
+ defaultValue: {},
+ data: {
+ raw: {},
+ format: () => ({}),
+ },
+ validate: () => Promise.resolve(true),
+ },
+ templates: {
+ defaultValue: {},
+ data: {
+ raw: {},
+ format: () => ({}),
+ },
+ validate: () => Promise.resolve(true),
+ },
+ fields: {
+ byId: {},
+ rootLevelFields: [],
+ aliases: {},
+ maxNestedDepth: 0,
+ },
+ documentFields: {
+ status: 'idle',
+ editor: 'default',
+ },
+ fieldsJsonEditor: {
+ format: () => ({}),
+ isValid: true,
+ },
+ search: {
+ term: '',
+ result: [],
+ },
+ };
+
+ const [state, dispatch] = useReducer(reducer, initialState);
+
+ return (
+    <StateContext.Provider value={state}>
+      <DispatchContext.Provider value={dispatch}>{children}</DispatchContext.Provider>
+    </StateContext.Provider>
+ );
+};
+
+export const useMappingsState = () => {
+ const ctx = useContext(StateContext);
+ if (ctx === undefined) {
+    throw new Error('useMappingsState must be used within a <StateProvider />');
+ }
+ return ctx;
+};
+
+export const useDispatch = () => {
+ const ctx = useContext(DispatchContext);
+ if (ctx === undefined) {
+    throw new Error('useDispatch must be used within a <StateProvider />');
+ }
+ return ctx;
+};
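Both hooks throw when used outside the provider, which makes a missing wrapper fail fast in tests. A short consumption sketch under that assumption (the component is hypothetical):

    import React from 'react';

    import { useMappingsState, useDispatch } from './mappings_state_context';

    // Hypothetical consumer: read the current search term and dispatch updates.
    export const ExampleSearchBox = () => {
      const { search } = useMappingsState();
      const dispatch = useDispatch();

      return (
        <input
          value={search.term}
          onChange={(e) => dispatch({ type: 'search:update', value: e.target.value })}
        />
      );
    };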
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/reducer.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/reducer.ts
index 27f8b12493008..18a8270117ea4 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/reducer.ts
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/reducer.ts
@@ -3,8 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import { OnFormUpdateArg, FormHook } from './shared_imports';
-import { Field, NormalizedFields, NormalizedField, FieldsEditor, SearchResult } from './types';
+import { Field, NormalizedFields, NormalizedField, State, Action } from './types';
import {
getFieldMeta,
getUniqueId,
@@ -17,99 +16,6 @@ import {
} from './lib';
import { PARAMETERS_DEFINITION } from './constants';
-export interface MappingsConfiguration {
- enabled?: boolean;
- throwErrorsForUnmappedFields?: boolean;
- date_detection: boolean;
- numeric_detection: boolean;
- dynamic_date_formats: string[];
- _source: {
- enabled?: boolean;
- includes?: string[];
- excludes?: string[];
- };
- _meta?: string;
-}
-
-export interface MappingsTemplates {
- dynamic_templates: DynamicTemplate[];
-}
-
-interface DynamicTemplate {
- [key: string]: {
- mapping: {
- [key: string]: any;
- };
- match_mapping_type?: string;
- match?: string;
- unmatch?: string;
- match_pattern?: string;
- path_match?: string;
- path_unmatch?: string;
- };
-}
-
-export interface MappingsFields {
- [key: string]: any;
-}
-
-type DocumentFieldsStatus = 'idle' | 'editingField' | 'creatingField';
-
-interface DocumentFieldsState {
- status: DocumentFieldsStatus;
- editor: FieldsEditor;
- fieldToEdit?: string;
- fieldToAddFieldTo?: string;
-}
-
-interface ConfigurationFormState extends OnFormUpdateArg {
- defaultValue: MappingsConfiguration;
- submitForm?: FormHook['submit'];
-}
-
-interface TemplatesFormState extends OnFormUpdateArg {
- defaultValue: MappingsTemplates;
- submitForm?: FormHook['submit'];
-}
-
-export interface State {
- isValid: boolean | undefined;
- configuration: ConfigurationFormState;
- documentFields: DocumentFieldsState;
- fields: NormalizedFields;
- fieldForm?: OnFormUpdateArg;
- fieldsJsonEditor: {
- format(): MappingsFields;
- isValid: boolean;
- };
- search: {
- term: string;
- result: SearchResult[];
- };
- templates: TemplatesFormState;
-}
-
-export type Action =
- | { type: 'editor.replaceMappings'; value: { [key: string]: any } }
- | { type: 'configuration.update'; value: Partial }
- | { type: 'configuration.save'; value: MappingsConfiguration }
- | { type: 'templates.update'; value: Partial }
- | { type: 'templates.save'; value: MappingsTemplates }
- | { type: 'fieldForm.update'; value: OnFormUpdateArg }
- | { type: 'field.add'; value: Field }
- | { type: 'field.remove'; value: string }
- | { type: 'field.edit'; value: Field }
- | { type: 'field.toggleExpand'; value: { fieldId: string; isExpanded?: boolean } }
- | { type: 'documentField.createField'; value?: string }
- | { type: 'documentField.editField'; value: string }
- | { type: 'documentField.changeStatus'; value: DocumentFieldsStatus }
- | { type: 'documentField.changeEditor'; value: FieldsEditor }
- | { type: 'fieldsJsonEditor.update'; value: { json: { [key: string]: any }; isValid: boolean } }
- | { type: 'search:update'; value: string }
- | { type: 'validity:update'; value: boolean };
-
-export type Dispatch = (action: Action) => void;
-
export const addFieldToState = (field: Field, state: State): State => {
const updatedFields = { ...state.fields };
const id = getUniqueId();
@@ -277,7 +183,7 @@ export const reducer = (state: State, action: Action): State => {
},
documentFields: {
...state.documentFields,
- status: 'idle',
+ ...action.value.documentFields,
fieldToAddFieldTo: undefined,
fieldToEdit: undefined,
},
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/shared_imports.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/shared_imports.ts
index 2979015c07455..097d039527950 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/shared_imports.ts
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/shared_imports.ts
@@ -49,4 +49,5 @@ export {
export {
JsonEditor,
OnJsonEditorUpdateHandler,
+ GlobalFlyout,
} from '../../../../../../../src/plugins/es_ui_shared/public';
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/document_fields.ts
similarity index 65%
rename from x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts
rename to x-pack/plugins/index_management/public/application/components/mappings_editor/types/document_fields.ts
index 5b18af68ed55b..a9f6d2ea03bdf 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/document_fields.ts
@@ -3,10 +3,12 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import { ReactNode, OptionHTMLAttributes } from 'react';
+import { ReactNode } from 'react';
-import { FieldConfig } from './shared_imports';
-import { PARAMETERS_DEFINITION } from './constants';
+import { GenericObject } from './mappings_editor';
+
+import { FieldConfig } from '../shared_imports';
+import { PARAMETERS_DEFINITION } from '../constants';
export interface DataTypeDefinition {
label: string;
@@ -203,100 +205,7 @@ export interface NormalizedField extends FieldMeta {
export type ChildFieldName = 'properties' | 'fields';
-export type FieldsEditor = 'default' | 'json';
-
-export type SelectOption = {
- value: unknown;
- text: T | ReactNode;
-} & OptionHTMLAttributes;
-
-export interface SuperSelectOption {
- value: unknown;
- inputDisplay?: ReactNode;
- dropdownDisplay?: ReactNode;
- disabled?: boolean;
- 'data-test-subj'?: string;
-}
-
export interface AliasOption {
id: string;
label: string;
}
-
-export interface IndexSettingsInterface {
- analysis?: {
- analyzer: {
- [key: string]: {
- type: string;
- tokenizer: string;
- char_filter?: string[];
- filter?: string[];
- position_increment_gap?: number;
- };
- };
- };
-}
-
-/**
- * When we define the index settings we can skip
- * the "index" property and directly add the "analysis".
- * ES always returns the settings wrapped under "index".
- */
-export type IndexSettings = IndexSettingsInterface | { index: IndexSettingsInterface };
-
-export interface ComboBoxOption {
- label: string;
- value?: unknown;
-}
-
-export interface SearchResult {
- display: JSX.Element;
- field: NormalizedField;
-}
-
-export interface SearchMetadata {
- /**
- * Whether or not the search term match some part of the field path.
- */
- matchPath: boolean;
- /**
- * If the search term matches the field type we will give it a higher score.
- */
- matchType: boolean;
- /**
- * If the last word of the search terms matches the field name
- */
- matchFieldName: boolean;
- /**
- * If the search term matches the beginning of the path we will give it a higher score
- */
- matchStartOfPath: boolean;
- /**
- * If the last word of the search terms fully matches the field name
- */
- fullyMatchFieldName: boolean;
- /**
- * If the search term exactly matches the field type
- */
- fullyMatchType: boolean;
- /**
- * If the search term matches the full field path
- */
- fullyMatchPath: boolean;
- /**
- * The score of the result that will allow us to sort the list
- */
- score: number;
- /**
- * The JSX with tag wrapping the matched string
- */
- display: JSX.Element;
- /**
- * The field path substring that matches the search
- */
- stringMatch: string | null;
-}
-
-export interface GenericObject {
- [key: string]: any;
-}
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/types/index.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/index.ts
new file mode 100644
index 0000000000000..cce2d550a68c1
--- /dev/null
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/index.ts
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+export * from './mappings_editor';
+
+export * from './document_fields';
+
+export * from './state';
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/types/mappings_editor.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/mappings_editor.ts
new file mode 100644
index 0000000000000..1ca944024ae2b
--- /dev/null
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/mappings_editor.ts
@@ -0,0 +1,110 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+import { ReactNode, OptionHTMLAttributes } from 'react';
+
+import { NormalizedField } from './document_fields';
+import { Mappings } from './state';
+
+export type OnUpdateHandler = (arg: OnUpdateHandlerArg) => void;
+
+export interface OnUpdateHandlerArg {
+ isValid?: boolean;
+ getData: () => Mappings | undefined;
+  validate: () => Promise<boolean>;
+}
+
+export type FieldsEditor = 'default' | 'json';
+
+export interface IndexSettingsInterface {
+ analysis?: {
+ analyzer: {
+ [key: string]: {
+ type: string;
+ tokenizer: string;
+ char_filter?: string[];
+ filter?: string[];
+ position_increment_gap?: number;
+ };
+ };
+ };
+}
+
+/**
+ * When we define the index settings we can skip
+ * the "index" property and directly add the "analysis".
+ * ES always returns the settings wrapped under "index".
+ */
+export type IndexSettings = IndexSettingsInterface | { index: IndexSettingsInterface };
+
+export type SelectOption<T extends string = string> = {
+  value: unknown;
+  text: T | ReactNode;
+} & OptionHTMLAttributes<any>;
+
+export interface ComboBoxOption {
+ label: string;
+ value?: unknown;
+}
+
+export interface SuperSelectOption {
+ value: unknown;
+ inputDisplay?: ReactNode;
+ dropdownDisplay?: ReactNode;
+ disabled?: boolean;
+ 'data-test-subj'?: string;
+}
+
+export interface SearchResult {
+ display: JSX.Element;
+ field: NormalizedField;
+}
+
+export interface SearchMetadata {
+ /**
+   * Whether or not the search term matches some part of the field path.
+ */
+ matchPath: boolean;
+ /**
+ * If the search term matches the field type we will give it a higher score.
+ */
+ matchType: boolean;
+ /**
+ * If the last word of the search terms matches the field name
+ */
+ matchFieldName: boolean;
+ /**
+ * If the search term matches the beginning of the path we will give it a higher score
+ */
+ matchStartOfPath: boolean;
+ /**
+ * If the last word of the search terms fully matches the field name
+ */
+ fullyMatchFieldName: boolean;
+ /**
+ * If the search term exactly matches the field type
+ */
+ fullyMatchType: boolean;
+ /**
+ * If the search term matches the full field path
+ */
+ fullyMatchPath: boolean;
+ /**
+ * The score of the result that will allow us to sort the list
+ */
+ score: number;
+ /**
+ * The JSX with tag wrapping the matched string
+ */
+ display: JSX.Element;
+ /**
+ * The field path substring that matches the search
+ */
+ stringMatch: string | null;
+}
+
+export interface GenericObject {
+ [key: string]: any;
+}
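OnUpdateHandler hands the consumer lazy accessors rather than the serialized mappings, so callers decide when to format and validate. A hedged example of such a handler (the import path and persistence step are illustrative):

    import { OnUpdateHandler } from './index';

    // Hypothetical handler: validate on demand, then read the serialized mappings.
    const onMappingsUpdate: OnUpdateHandler = async ({ isValid, getData, validate }) => {
      const valid = isValid ?? (await validate());
      if (valid) {
        const mappings = getData();
        // e.g. store `mappings` in the parent form's state
      }
    };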
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/types/state.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/state.ts
new file mode 100644
index 0000000000000..34df70374aa88
--- /dev/null
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/state.ts
@@ -0,0 +1,107 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { FormHook, OnFormUpdateArg } from '../shared_imports';
+import { Field, NormalizedFields } from './document_fields';
+import { FieldsEditor, SearchResult } from './mappings_editor';
+
+export type Mappings = MappingsTemplates &
+ MappingsConfiguration & {
+ properties?: MappingsFields;
+ };
+
+export interface MappingsConfiguration {
+ enabled?: boolean;
+ throwErrorsForUnmappedFields?: boolean;
+ date_detection?: boolean;
+ numeric_detection?: boolean;
+ dynamic_date_formats?: string[];
+ _source?: {
+ enabled?: boolean;
+ includes?: string[];
+ excludes?: string[];
+ };
+ _meta?: string;
+}
+
+export interface MappingsTemplates {
+ dynamic_templates?: DynamicTemplate[];
+}
+
+export interface DynamicTemplate {
+ [key: string]: {
+ mapping: {
+ [key: string]: any;
+ };
+ match_mapping_type?: string;
+ match?: string;
+ unmatch?: string;
+ match_pattern?: string;
+ path_match?: string;
+ path_unmatch?: string;
+ };
+}
+
+export interface MappingsFields {
+ [key: string]: any;
+}
+
+export type DocumentFieldsStatus = 'idle' | 'editingField' | 'creatingField';
+
+export interface DocumentFieldsState {
+ status: DocumentFieldsStatus;
+ editor: FieldsEditor;
+ fieldToEdit?: string;
+ fieldToAddFieldTo?: string;
+}
+
+export interface ConfigurationFormState extends OnFormUpdateArg<MappingsConfiguration> {
+ defaultValue: MappingsConfiguration;
+ submitForm?: FormHook['submit'];
+}
+
+interface TemplatesFormState extends OnFormUpdateArg<MappingsTemplates> {
+ defaultValue: MappingsTemplates;
+ submitForm?: FormHook['submit'];
+}
+
+export interface State {
+ isValid: boolean | undefined;
+ configuration: ConfigurationFormState;
+ documentFields: DocumentFieldsState;
+ fields: NormalizedFields;
+  fieldForm?: OnFormUpdateArg<any>;
+ fieldsJsonEditor: {
+ format(): MappingsFields;
+ isValid: boolean;
+ };
+ search: {
+ term: string;
+ result: SearchResult[];
+ };
+ templates: TemplatesFormState;
+}
+
+export type Action =
+ | { type: 'editor.replaceMappings'; value: { [key: string]: any } }
+  | { type: 'configuration.update'; value: Partial<ConfigurationFormState> }
+ | { type: 'configuration.save'; value: MappingsConfiguration }
+  | { type: 'templates.update'; value: Partial<TemplatesFormState> }
+ | { type: 'templates.save'; value: MappingsTemplates }
+  | { type: 'fieldForm.update'; value: OnFormUpdateArg<any> }
+ | { type: 'field.add'; value: Field }
+ | { type: 'field.remove'; value: string }
+ | { type: 'field.edit'; value: Field }
+ | { type: 'field.toggleExpand'; value: { fieldId: string; isExpanded?: boolean } }
+ | { type: 'documentField.createField'; value?: string }
+ | { type: 'documentField.editField'; value: string }
+ | { type: 'documentField.changeStatus'; value: DocumentFieldsStatus }
+ | { type: 'documentField.changeEditor'; value: FieldsEditor }
+ | { type: 'fieldsJsonEditor.update'; value: { json: { [key: string]: any }; isValid: boolean } }
+ | { type: 'search:update'; value: string }
+ | { type: 'validity:update'; value: boolean };
+
+export type Dispatch = (action: Action) => void;
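The Action union and the Dispatch alias above are what the editor's reducer consumes (reducer.ts is not part of this excerpt). A trimmed, self-contained sketch of how such a discriminated union narrows inside a reducer; the state shape and the two cases below are illustrative only and do not mirror the real reducer:

// Illustrative subset of the state and actions; the action names match the
// patch, but the reducer body is a placeholder.
type DocumentFieldsStatus = 'idle' | 'editingField' | 'creatingField';

interface DemoState {
  search: { term: string };
  documentFields: { status: DocumentFieldsStatus };
}

type DemoAction =
  | { type: 'search:update'; value: string }
  | { type: 'documentField.changeStatus'; value: DocumentFieldsStatus };

const demoReducer = (state: DemoState, action: DemoAction): DemoState => {
  switch (action.type) {
    case 'search:update':
      // action.value is narrowed to string here.
      return { ...state, search: { term: action.value } };
    case 'documentField.changeStatus':
      // ...and to DocumentFieldsStatus here.
      return { ...state, documentFields: { status: action.value } };
    default:
      return state;
  }
};

// Example dispatch: update the search term.
demoReducer(
  { search: { term: '' }, documentFields: { status: 'idle' } },
  { type: 'search:update', value: 'keyword' }
);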
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/use_state_listener.tsx
similarity index 53%
rename from x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state.tsx
rename to x-pack/plugins/index_management/public/application/components/mappings_editor/use_state_listener.tsx
index ad5056fa73ce1..f1ffd5356c977 100644
--- a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/use_state_listener.tsx
@@ -3,92 +3,32 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-
-import React, { useReducer, useEffect, createContext, useContext, useMemo, useRef } from 'react';
+import { useEffect, useMemo } from 'react';
import {
- reducer,
+ Field,
+ Mappings,
MappingsConfiguration,
- MappingsFields,
MappingsTemplates,
- State,
- Dispatch,
-} from './reducer';
-import { Field } from './types';
+ OnUpdateHandler,
+} from './types';
import { normalize, deNormalize, stripUndefinedValues } from './lib';
+import { useMappingsState, useDispatch } from './mappings_state_context';
-type Mappings = MappingsTemplates &
- MappingsConfiguration & {
- properties?: MappingsFields;
- };
-
-export interface Types {
- Mappings: Mappings;
- MappingsConfiguration: MappingsConfiguration;
- MappingsFields: MappingsFields;
- MappingsTemplates: MappingsTemplates;
-}
-
-export interface OnUpdateHandlerArg {
- isValid?: boolean;
- getData: () => Mappings | undefined;
-  validate: () => Promise<boolean>;
-}
-
-export type OnUpdateHandler = (arg: OnUpdateHandlerArg) => void;
-
-const StateContext = createContext<State | undefined>(undefined);
-const DispatchContext = createContext<Dispatch | undefined>(undefined);
-
-export interface Props {
- children: (params: { state: State }) => React.ReactNode;
- value: {
+interface Args {
+ onChange: OnUpdateHandler;
+ value?: {
templates: MappingsTemplates;
configuration: MappingsConfiguration;
fields: { [key: string]: Field };
};
- onChange: OnUpdateHandler;
}
-export const MappingsState = React.memo(({ children, onChange, value }: Props) => {
- const didMountRef = useRef(false);
+export const useMappingsStateListener = ({ onChange, value }: Args) => {
+ const state = useMappingsState();
+ const dispatch = useDispatch();
- const parsedFieldsDefaultValue = useMemo(() => normalize(value.fields), [value.fields]);
-
- const initialState: State = {
- isValid: true,
- configuration: {
- defaultValue: value.configuration,
- data: {
- raw: value.configuration,
- format: () => value.configuration,
- },
- validate: () => Promise.resolve(true),
- },
- templates: {
- defaultValue: value.templates,
- data: {
- raw: value.templates,
- format: () => value.templates,
- },
- validate: () => Promise.resolve(true),
- },
- fields: parsedFieldsDefaultValue,
- documentFields: {
- status: parsedFieldsDefaultValue.rootLevelFields.length === 0 ? 'creatingField' : 'idle',
- editor: 'default',
- },
- fieldsJsonEditor: {
- format: () => ({}),
- isValid: true,
- },
- search: {
- term: '',
- result: [],
- },
- };
-
- const [state, dispatch] = useReducer(reducer, initialState);
+ const parsedFieldsDefaultValue = useMemo(() => normalize(value?.fields), [value?.fields]);
useEffect(() => {
// If we are creating a new field, but haven't entered any name
@@ -158,46 +98,28 @@ export const MappingsState = React.memo(({ children, onChange, value }: Props) =
},
isValid: state.isValid,
});
- }, [state, onChange]);
+ }, [state, onChange, dispatch]);
useEffect(() => {
/**
* If the value has changed that probably means that we have loaded
* new data from JSON. We need to update our state with the new mappings.
*/
- if (didMountRef.current) {
- dispatch({
- type: 'editor.replaceMappings',
- value: {
- configuration: value.configuration,
- templates: value.templates,
- fields: parsedFieldsDefaultValue,
- },
- });
- } else {
- didMountRef.current = true;
+ if (value === undefined) {
+ return;
}
- }, [value, parsedFieldsDefaultValue]);
-
-  return (
-    <StateContext.Provider value={state}>
-      <DispatchContext.Provider value={dispatch}>{children({ state })}</DispatchContext.Provider>
-    </StateContext.Provider>
-  );
-});
-
-export const useMappingsState = () => {
- const ctx = useContext(StateContext);
- if (ctx === undefined) {
-    throw new Error('useMappingsState must be used within a <MappingsState>');
- }
- return ctx;
-};
-export const useDispatch = () => {
- const ctx = useContext(DispatchContext);
- if (ctx === undefined) {
-    throw new Error('useDispatch must be used within a <MappingsState>');
- }
- return ctx;
+ dispatch({
+ type: 'editor.replaceMappings',
+ value: {
+ configuration: value.configuration,
+ templates: value.templates,
+ fields: parsedFieldsDefaultValue,
+ documentFields: {
+ status: parsedFieldsDefaultValue.rootLevelFields.length === 0 ? 'creatingField' : 'idle',
+ editor: 'default',
+ },
+ },
+ });
+ }, [value, parsedFieldsDefaultValue, dispatch]);
};
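With the render-prop MappingsState component replaced by the useMappingsStateListener hook, consumers read and mutate the shared state through the context hooks. A rough usage sketch; the hook names and import paths come from the diffs above, while the demo component (and the assumption that it renders inside the provider exposed by mappings_state_context) is hypothetical:

// Hypothetical consumer; only the imported hooks and types exist in this patch.
import React from 'react';
import { useMappingsState, useDispatch } from './mappings_state_context';
import { useMappingsStateListener } from './use_state_listener';
import { OnUpdateHandler } from './types';

export const MappingsEditorDemo = ({ onChange }: { onChange: OnUpdateHandler }) => {
  // Keep the shared reducer state in sync with the consumer's onChange callback.
  useMappingsStateListener({ onChange });

  const { isValid } = useMappingsState();
  const dispatch = useDispatch();

  return (
    <button
      disabled={isValid === false}
      onClick={() => dispatch({ type: 'documentField.createField' })}
    >
      Add field
    </button>
  );
};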
diff --git a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_components.tsx b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_components.tsx
index df0cc791384fe..ae831f4acf7ee 100644
--- a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_components.tsx
+++ b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_components.tsx
@@ -39,7 +39,7 @@ const i18nTexts = {
),
};
-export const StepComponents = ({ defaultValue = [], onChange, esDocsBase }: Props) => {
+export const StepComponents = ({ defaultValue, onChange, esDocsBase }: Props) => {
const [state, setState] = useState<{
isLoadingComponents: boolean;
components: ComponentTemplateListItem[];
diff --git a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx
index f3d05ac38108a..fcc9795617ebb 100644
--- a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx
+++ b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import React, { useEffect } from 'react';
+import React, { useEffect, useCallback } from 'react';
import {
EuiFlexGroup,
EuiFlexItem,
@@ -153,25 +153,18 @@ export const StepLogistics: React.FunctionComponent<Props> = React.memo(
serializer: formSerializer,
deserializer: formDeserializer,
});
+ const { subscribe, submit, isSubmitted, isValid: isFormValid, getErrors: getFormErrors } = form;
/**
* When the consumer call validate() on this step, we submit the form so it enters the "isSubmitted" state
* and we can display the form errors on top of the forms if there are any.
*/
- const validate = async () => {
- return (await form.submit()).isValid;
- };
+ const validate = useCallback(async () => {
+ return (await submit()).isValid;
+ }, [submit]);
useEffect(() => {
- onChange({
- isValid: form.isValid,
- validate,
- getData: form.getFormData,
- });
- }, [form.isValid, onChange]); // eslint-disable-line react-hooks/exhaustive-deps
-
- useEffect(() => {
- const subscription = form.subscribe(({ data, isValid }) => {
+ const subscription = subscribe(({ data, isValid }) => {
onChange({
isValid,
validate,
@@ -179,7 +172,7 @@ export const StepLogistics: React.FunctionComponent<Props> = React.memo(
});
});
return subscription.unsubscribe;
- }, [onChange]); // eslint-disable-line react-hooks/exhaustive-deps
+ }, [onChange, validate, subscribe]);
const { name, indexPatterns, dataStream, order, priority, version } = getFieldsMeta(
documentationService.getEsDocsBase()
@@ -204,7 +197,7 @@ export const StepLogistics: React.FunctionComponent<Props> = React.memo(
@@ -220,8 +213,8 @@ export const StepLogistics: React.FunctionComponent<Props> = React.memo(